pax_global_header00006660000000000000000000000064144714236560014526gustar00rootroot0000000000000052 comment=a7af6136ae969422e8a7af94fbf8497dcb29a474 jfroy-aiobafi6-311c9f2/000077500000000000000000000000001447142365600146675ustar00rootroot00000000000000jfroy-aiobafi6-311c9f2/.clabot000066400000000000000000000000521447142365600161310ustar00rootroot00000000000000{ "contributors": ["jfroy", "bdraco"] } jfroy-aiobafi6-311c9f2/.github/000077500000000000000000000000001447142365600162275ustar00rootroot00000000000000jfroy-aiobafi6-311c9f2/.github/dependabot.yml000066400000000000000000000006561447142365600210660ustar00rootroot00000000000000# To get started with Dependabot version updates, you'll need to specify which # package ecosystems to update and where the package manifests are located. # Please see the documentation for all configuration options: # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates version: 2 updates: - package-ecosystem: "pip" directory: "/" schedule: interval: "weekly" jfroy-aiobafi6-311c9f2/.github/workflows/000077500000000000000000000000001447142365600202645ustar00rootroot00000000000000jfroy-aiobafi6-311c9f2/.github/workflows/ci.yaml000066400000000000000000000021621447142365600215440ustar00rootroot00000000000000# Based on https://github.com/snok/install-poetry name: CI on: push jobs: test: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v3 with: fetch-depth: 1 - name: Set up Python 3.9 id: setup-python uses: actions/setup-python@v4 with: python-version: 3.9 - name: Install Poetry uses: snok/install-poetry@v1 with: virtualenvs-create: true virtualenvs-in-project: true installer-parallel: true - name: Load cached venv id: cached-poetry-dependencies uses: actions/cache@v3 with: path: .venv key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} - name: Install dependencies if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' run: poetry install --no-interaction --no-root - name: Install project run: poetry install --no-interaction - name: Run pytest run: | source .venv/bin/activate pytest . jfroy-aiobafi6-311c9f2/.github/workflows/codeql-analysis.yml000066400000000000000000000052501447142365600241010ustar00rootroot00000000000000# For most projects, this workflow file will not need changing; you simply need # to commit it to your repository. # # You may wish to alter this file to override the set of languages analyzed, # or to provide custom queries or build logic. # # ******** NOTE ******** # We have attempted to detect the languages in your repository. Please check # the `language` matrix defined below to confirm you have the correct set of # supported CodeQL languages. # name: "CodeQL" on: push: branches: [ "main" ] pull_request: # The branches below must be a subset of the branches above branches: [ "main" ] schedule: - cron: '23 17 * * 2' jobs: analyze: name: Analyze runs-on: ubuntu-latest permissions: actions: read contents: read security-events: write strategy: fail-fast: false matrix: language: [ 'python' ] # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support steps: - name: Checkout repository uses: actions/checkout@v3 # Initializes the CodeQL tools for scanning. 
- name: Initialize CodeQL uses: github/codeql-action/init@v2 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. # By default, queries listed here will override any specified in a config file. # Prefix the list here with "+" to use these queries and those in the config file. # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs # queries: security-extended,security-and-quality # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild uses: github/codeql-action/autobuild@v2 # ℹ️ Command-line programs to run using the OS shell. # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun # If the Autobuild fails above, remove it and uncomment the following three lines. # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. # - run: | # echo "Run, Build Application using script" # ./location_of_script_within_repo/buildscript.sh - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@v2 jfroy-aiobafi6-311c9f2/.gitignore000066400000000000000000000000561447142365600166600ustar00rootroot00000000000000/.venv/ /.vscode/ /dist/ /dumps/ __pycache__/ jfroy-aiobafi6-311c9f2/CLA.md000066400000000000000000000030671447142365600156160ustar00rootroot00000000000000# Contributor License Agreement ``` By making a contribution to this project, I certify that: (a) The contribution was created in whole or in part by me and I have the right to submit it under the Apache 2.0 license; or (b) The contribution is based upon previous work that, to the best of my knowledge, is covered under an appropriate open source license and I have the right under that license to submit that work with modifications, whether created in whole or in part by me, under the Apache 2.0 license; or (c) The contribution was provided directly to me by some other person who certified (a), (b) or (c) and I have not modified it. (d) I understand and agree that this project and the contribution are public and that a record of the contribution (including all personal information I submit with it) is maintained indefinitely and may be redistributed consistent with this project or the open source license(s) involved. ``` ## Attribution The text of this license is available under the [Creative Commons Attribution-ShareAlike 3.0 Unported License](http://creativecommons.org/licenses/by-sa/3.0/). It is based on the Linux [Developer Certificate Of Origin](http://elinux.org/Developer_Certificate_Of_Origin), but is modified to explicitly use the Apache 2.0 license and not mention sign-off. ## Signing To sign this CLA you must first submit a pull request to a repository under the jfroy organization. ## Adoption This Contributor License Agreement (CLA) was first announced on June 28th, 2023 and adopted June 28th, 2023. jfroy-aiobafi6-311c9f2/LICENSE000066400000000000000000000261351447142365600157030ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. 
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. 
This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language governing permissions and limitations under the License.jfroy-aiobafi6-311c9f2/README.md000066400000000000000000000031321447142365600161450ustar00rootroot00000000000000# aiobafi6 [![PyPI version](https://badge.fury.io/py/aiobafi6.svg)](https://badge.fury.io/py/aiobafi6) [![Downloads](https://pepy.tech/badge/aiobafi6)](https://pepy.tech/project/aiobafi6) aiobafi6 is a Python library to discover, query, and control [Big Ass Fans](https://bigassfans.com) products that use the i6 protocol, which includes i6 fans and Haiku fans with the 3.0 firmware. It supports almost all the features of the previous protocol ("SenseMe"), with the exception of rooms and sleep mode. Occupancy support was added in the 3.1 firmware. ## Command line The aiobafi6 package comes with a minimal command line (`aiobafi6`) that uses either the library or direct communication with a target device. It is useful for debugging and interacting with the firmware. Run with `--help` for usage. ## Compiling the aiobafi6 protocol buffer The BAF i6 protocol uses [protocol buffers](https://developers.google.com/protocol-buffers) for message wire serialization. This library maintains a [single proto file](proto/aiobafi6.proto) with all known messages and constants. The generated Python client for this proto file is checked in the repo to avoid depending on the protocol buffer compiler for installation. Whenever the source proto file is changed, the Python client files must be re-generated: `poe protoc` ## Special thanks [@bdraco](https://github.com/bdraco) for writing the HASS integration, helping with Python, and suggesting BAF is using protobufs. [@oogje](https://github.com/oogje) for a reference homebridge implementation. [Big Ass Fans](https://www.bigassfans.com) for making great products. jfroy-aiobafi6-311c9f2/aiobafi6/000077500000000000000000000000001447142365600163475ustar00rootroot00000000000000jfroy-aiobafi6-311c9f2/aiobafi6/__init__.py000066400000000000000000000006351447142365600204640ustar00rootroot00000000000000"""aiobafi6""" from .const import MIN_API_VERSION from .device import Device from .discovery import PORT, ZEROCONF_SERVICE_TYPE, Service, ServiceBrowser from .exceptions import DeviceUUIDMismatchError from .protoprop import OffOnAuto __all__ = ( "MIN_API_VERSION", "PORT", "ZEROCONF_SERVICE_TYPE", "Device", "DeviceUUIDMismatchError", "OffOnAuto", "Service", "ServiceBrowser", ) jfroy-aiobafi6-311c9f2/aiobafi6/cmd/000077500000000000000000000000001447142365600171125ustar00rootroot00000000000000jfroy-aiobafi6-311c9f2/aiobafi6/cmd/__init__.py000066400000000000000000000000001447142365600212110ustar00rootroot00000000000000jfroy-aiobafi6-311c9f2/aiobafi6/cmd/main.py000066400000000000000000000177521447142365600204220ustar00rootroot00000000000000"""Command line tool for aiobafi6.""" from __future__ import annotations import argparse import asyncio import difflib import ipaddress import logging.config import sys import typing as t from google.protobuf import text_format from zeroconf import IPVersion from zeroconf.asyncio import AsyncZeroconf from aiobafi6 import wireutils from aiobafi6.device import Device from aiobafi6.discovery import PORT, Service, ServiceBrowser from aiobafi6.proto import aiobafi6_pb2 ARGS = argparse.ArgumentParser( description="Command line tool for aiobafi6.\n\nThe tool supports a direct connection mode that is more powerful for debugging."
# pylint: disable=line-too-long ) ARGS.add_argument( "-s", "--discover", action="store_true", dest="discover", help="discover devices", ) ARGS.add_argument( "-i", "--ip", action="store", dest="ip_addr", help="device address", ) ARGS.add_argument( "-d", "--dump", action="store_true", dest="dump", help="enable proto dumping", ) ARGS.add_argument( "-r", "--direct", action="store_true", dest="direct", help="directly connect to device, bypassing library", ) ARGS.add_argument( "-t", "--interval", action="store", dest="interval", default=15, type=int, help="property query interval in seconds", ) ARGS.add_argument( "property", nargs="?", help="property name", ) ARGS.add_argument( "value", nargs="?", help="property value", ) LOGGING = { "version": 1, "disable_existing_loggers": True, "formatters": { "simple": {"format": "%(levelname)s:%(asctime)s:%(name)s: %(message)s"}, }, "handlers": { "console": { "level": "DEBUG", "class": "logging.StreamHandler", "formatter": "simple", }, }, "loggers": { "": { "level": "INFO", "handlers": ["console"], }, "aiobafi6.device": { "level": "DEBUG", }, "aiobafi6.discovery": { "level": "DEBUG", }, }, } async def query_loop(writer: asyncio.StreamWriter, interval: int): """Run direct query loop.""" print(f"starting query loop: interval {interval}s") root = aiobafi6_pb2.Root() # pylint: disable=no-member root.root2.query.property_query = aiobafi6_pb2.ALL # pylint: disable=no-member writer.write(wireutils.serialize(root)) while True: await asyncio.sleep(interval) print("sending refresh query") root = aiobafi6_pb2.Root() # pylint: disable=no-member root.root2.query.property_query = aiobafi6_pb2.ALL # pylint: disable=no-member writer.write(wireutils.serialize(root)) async def direct_query_state(ip_addr: str, dump: bool, interval: int): """Query state directly.""" print(f"directly querying all state from {ip_addr}") reader, writer = await asyncio.open_connection(ip_addr, PORT) _ = asyncio.create_task(query_loop(writer, interval)) i = 0 previous = aiobafi6_pb2.Properties() # pylint: disable=no-member latest = aiobafi6_pb2.Properties() # pylint: disable=no-member unknown: dict[str, t.Any] = {} previous_sorted_unknown = [] while True: # The wire format frames protobuf messages with 0xc0, so the first `readuntil` # will return just that byte and the next will return the message with the # terminating byte. raw_buf = await reader.readuntil(b"\xc0") if len(raw_buf) == 1: continue buf = wireutils.remove_emulation_prevention(raw_buf[:-1]) if dump: with open(f"dump-query-{i}.bin", "wb") as writer: writer.write(buf) i += 1 root = aiobafi6_pb2.Root() # pylint: disable=no-member root.ParseFromString(buf) for prop in root.root2.query_result.properties: for field in prop.UnknownFields(): # type: ignore unknown[str(field.field_number)] = field.data for field in prop.capabilities.UnknownFields(): # type: ignore unknown["capabilities." + str(field.field_number)] = field.data for field in prop.stats.UnknownFields(): # type: ignore unknown["stats." 
+ str(field.field_number)] = field.data root.DiscardUnknownFields() # type: ignore for prop in root.root2.query_result.properties: prop.ClearField("local_datetime") prop.ClearField("utc_datetime") prop.stats.ClearField("uptime_minutes") latest.MergeFrom(prop) diff = "".join( difflib.unified_diff( text_format.MessageToString(previous).splitlines(keepends=True), text_format.MessageToString(latest).splitlines(keepends=True), ) ) if len(diff) > 0: print(diff) previous.CopyFrom(latest) sorted_unknown = [f"{k}: {str(unknown[k])}\n" for k in sorted(unknown.keys())] diff = "".join(difflib.unified_diff(previous_sorted_unknown, sorted_unknown)) if len(diff) > 0: print(diff) previous_sorted_unknown = sorted_unknown def print_device(dev: Device): """Print a Device.""" print("<-- callback") print(dev.properties_proto) print("callback -->") async def query_state(ip_addr: str, interval: int): """Query state via a Device.""" print(f"querying all state from {ip_addr}: interval {interval}s") dev = Device( Service(ip_addresses=[ip_addr], port=PORT), query_interval_seconds=interval ) dev.add_callback(print_device) await dev.async_run() async def direct_set_property(ip_addr: str, prop: str, value: int, dump: bool): """Set a property directly.""" print(f"directly setting {prop} of {ip_addr} to {value}") root = aiobafi6_pb2.Root() # pylint: disable=no-member try: setattr(root.root2.commit.properties, prop, value) except TypeError: setattr(root.root2.commit.properties, prop, int(value)) buf = wireutils.serialize(root) if dump: with open(f"dump-set-{prop}-{value}.bin", "wb") as writer: writer.write(buf) _, writer = await asyncio.open_connection(ip_addr, PORT) writer.write(buf) await writer.drain() writer.close() await writer.wait_closed() async def set_property(ip_addr: str, prop: str, value: int): """Set a property via a Device.""" print(f"setting {prop} of {ip_addr} to {value}") dev = Device(Service(ip_addresses=[ip_addr], port=PORT), query_interval_seconds=0) dev.async_run() await dev.async_wait_available() try: setattr(dev, prop, value) except TypeError: setattr(dev, prop, int(value)) async def discover(): """async discover""" aiozc = AsyncZeroconf(ip_version=IPVersion.V4Only) _ = ServiceBrowser( aiozc.zeroconf, lambda services: print(f"== Discover ==\n{services}") ) while True: await asyncio.sleep(1) async def async_main(): """async_main""" args = ARGS.parse_args() logging.config.dictConfig(LOGGING) if args.discover: await discover() return if args.ip_addr is None: print("must provide an ip address") sys.exit(1) try: ip_addr = ipaddress.ip_address(args.ip_addr) except ValueError: print(f"invalid address: {args.ip_addr}") sys.exit(1) if args.property is not None: if args.value is None: raise RuntimeError("must specify property value") if args.direct: await direct_set_property( str(ip_addr), args.property, args.value, dump=args.dump ) else: await set_property(str(ip_addr), args.property, args.value) elif args.direct: await direct_query_state(str(ip_addr), dump=args.dump, interval=args.interval) else: await query_state(str(ip_addr), interval=args.interval) def main(): """main""" try: asyncio.run(async_main()) except KeyboardInterrupt: pass if __name__ == "__main__": main() jfroy-aiobafi6-311c9f2/aiobafi6/const.py000066400000000000000000000004371447142365600200530ustar00rootroot00000000000000"""Constants for aiobafi6.""" # The default delay between device connection attempts. DELAY_BETWEEN_CONNECTS_SECONDS = 30 # The minimum device API version supported by aiobafi6. 
MIN_API_VERSION = 4 # The minimum device API version for occupancy support. OCCUPANCY_MIN_API_VERSION = 5 jfroy-aiobafi6-311c9f2/aiobafi6/device.py000066400000000000000000000631541447142365600201670ustar00rootroot00000000000000"""aiobafi6 device. Provides functionality to query and control BAF i6 protocol devices. """ from __future__ import annotations import asyncio import copy import inspect import logging import time import typing as t from google.protobuf import json_format from google.protobuf.message import Message from . import wireutils from .const import DELAY_BETWEEN_CONNECTS_SECONDS, OCCUPANCY_MIN_API_VERSION from .discovery import Service from .exceptions import DeviceUUIDMismatchError from .proto import aiobafi6_pb2 from .protoprop import ( ClosedIntervalValidator, OffOnAuto, ProtoProp, from_proto_humidity, from_proto_temperature, maybe_proto_field, to_proto_temperature, ) __all__ = ( "VOLATILE_PROPERTIES", "Device", ) VOLATILE_PROPERTIES = ( "current_rpm", "local_datetime", "stats", "utc_datetime", ) _LOGGER = logging.getLogger(__name__) _MAX_SPEED = 7 _RECV_BUFFER_LIMIT = 4096 # No message is ever expected to be > 4K _PROPS_REQUIRED_FOR_AVAILABLE = ( "name", "model", "firmware_version", "mac_address", "dns_sd_uuid", "capabilities", "ip_address", ) def _clear_volatile_props(props: aiobafi6_pb2.Properties): """Clear volatile properties from `props`.""" for field in VOLATILE_PROPERTIES: props.ClearField(field) class Device: """A BAF i6 protocol device. The design of the class is relatively simple. Since the protocol is based on protobuf, the majority of a device's state can be stored in a `Properties` message. The query loop simply updates this message using `MergeFrom`, with unknown fields removed (as they are otherwise treated as repeated fields and would lead to unbound memory growth). Synthetic properties expose the protobuf to clients. A device must be initialized with a `Service`, either obtained using the `discovery` module or manually created. The only required fields are `ip_addresses` and `port`. A `Device` object is initially inert. A client must call its `async_run` method to connect the device, process state changes and handle device updates. An `asyncio.Future` is returned to monitor and stop the device. A device has an `available` property which is true when the device is connected and has received basic properties from the firmware. The `async_wait_available` coroutine can be used to wait for a device to become available (which may never happen). If the `uuid` field of the `Service` used to initialize a device is set, the library will validate it against the device's `dns_sd_uuid` property after connecting to the device and receiving basic properties. If the UUIDs do not match, the device is stopped and a `DeviceUUIDMismatchError` exception is set on the run future and raised in `async_wait_available` coroutines. To disable periodic properties queries, set `query_interval_seconds` to 0. Clients can register callbacks to be notified when one or more device properties have changed. Callbacks are suppressed when no actual changes are observed (i.e. the device is in a steady state). Callbacks are also suppressed when only so-called volatile properties have changed, such as fan RPM, device uptime or the device's internal clock. This can be disabled by setting `ignore_volatile_props` to False. These properties are still queried and available to read from the device.
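
    A minimal usage sketch (illustrative only; the IP address below is a
    placeholder and error handling is omitted):

        from aiobafi6 import PORT, Device, OffOnAuto, Service

        async def example() -> None:
            device = Device(Service(ip_addresses=["192.168.1.2"], port=PORT))
            run_fut = device.async_run()         # connect and start querying
            await device.async_wait_available()  # wait for basic properties
            device.fan_mode = OffOnAuto.ON       # commit a property change
            run_fut.cancel()                     # stop the device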
""" def __init__( self, service: Service, query_interval_seconds: int = 60, ignore_volatile_props: bool = True, delay_between_connects_seconds: int = DELAY_BETWEEN_CONNECTS_SECONDS, ): if len(service.ip_addresses) == 0 or service.port == 0: raise ValueError( f"Invalid service: must have at least one address and a port: {service}" ) self._service = copy.deepcopy(service) self._query_interval_seconds = query_interval_seconds self._ignore_volatile_props = ignore_volatile_props # Permanent Properties protobuf into which query results are merged. self._properties = aiobafi6_pb2.Properties() # pylint: disable=no-member # Device update callbacks. self._callbacks: list[t.Callable[[Device], None]] = [] self._coro_callbacks: list[t.Coroutine] = [] self._dispatch_coro_callback_tasks: t.Set[asyncio.Task] = set() # Connection and periodic queries. self._loop = asyncio.get_running_loop() if self._loop is None: raise RuntimeError("no running loop") self._run_fut: t.Optional[asyncio.Future] = None self._next_connect_ts: float = time.monotonic() self._connect_timer: t.Optional[asyncio.TimerHandle] = None self._connect_task: t.Optional[asyncio.Task] = None self._delay_between_connects_seconds = delay_between_connects_seconds self._transport: t.Optional[asyncio.Transport] = None self._protocol: t.Optional[Protocol] = None self._query_timer: t.Optional[asyncio.TimerHandle] = None # Availability. self._available_fut: asyncio.Future = self._loop.create_future() def __eq__(self, other: t.Any) -> bool: if isinstance(other, Device): return self.dns_sd_uuid == t.cast(Device, other).dns_sd_uuid if isinstance(other, str): return other == self.name return NotImplemented def __hash__(self) -> int: return hash(self.dns_sd_uuid) def __str__(self) -> str: string = f"Name: {self.name}" string += f", Model: {self.model}" string += f", DNS SD UUID: {self.dns_sd_uuid}" string += f", IP: {self.ip_address}" string += f", MAC: {self.mac_address}" if self.firmware_version is not None: string += f", Firmware: {self.firmware_version}" if self.has_light is not None: string += f", Has Light: {self.has_light}" return string @property def service(self) -> Service: # pylint: disable=missing-function-docstring return copy.deepcopy(self._service) @property def properties_dict(self) -> dict[str, t.Any]: """Return a dict created by merging the device's service and properties.""" propsd = { "dns_sd_uuid": self._service.uuid, "service_name": self._service.service_name, "name": self._service.device_name, "model": self._service.model, "api_version": self._service.api_version, "ip_addresses": self._service.ip_addresses, "port": self._service.port, } propsd.update( json_format.MessageToDict( t.cast(Message, self._properties), preserving_proto_field_name=True ) ) return propsd @property def properties_proto( # pylint: disable=missing-function-docstring self, ) -> aiobafi6_pb2.Properties: props = aiobafi6_pb2.Properties() # pylint: disable=no-member props.CopyFrom(self._properties) return props # Client callbacks def add_callback(self, callback: t.Callable[[Device], None]) -> None: """Add a device update callback. The callback must be a `Callable` with a `Device` argument. 
""" is_coroutine = inspect.iscoroutinefunction(callback) if is_coroutine: if callback not in self._coro_callbacks: self._coro_callbacks.append(callback) # type: ignore _LOGGER.debug("%s: Added coroutine callback.", self.name) return if callback not in self._callbacks: self._callbacks.append(callback) _LOGGER.debug("%s: Added function callback.", self.name) def remove_callback(self, callback) -> None: """Remove a device update callback.""" if callback in self._coro_callbacks: self._coro_callbacks.remove(callback) _LOGGER.debug("%s: Removed coroutine callback.", self.name) return if callback in self._callbacks: self._callbacks.remove(callback) _LOGGER.debug("%s: Removed function callback.", self.name) return def _dispatch_callbacks(self) -> None: """Dispatch registered device update callbacks. An async task is created for coroutine callbacks. Function callbacks are executed synchronously inside a try-except block to swallow any error.""" for callback in self._callbacks: try: callback(self) except Exception: # pylint: disable=broad-except _LOGGER.exception("Exception raised during callback.") for coro in self._coro_callbacks: task = asyncio.create_task(coro(self)) # type: ignore self._dispatch_coro_callback_tasks.add(task) task.add_done_callback(self._dispatch_coro_callback_tasks.remove) _LOGGER.debug( "%s: Dispatched %s client callbacks.", self.name, len(self._callbacks) + len(self._coro_callbacks), ) # protoprop support def _maybe_property(self, field: str) -> t.Optional[t.Any]: return maybe_proto_field(t.cast(Message, self._properties), field) def _commit_property(self, prop: aiobafi6_pb2.Properties) -> None: """Commit a property to the device. This does not update the properties reflected by the `Device` object. That will happen once the device confirms the change by doing a properties push. Is it unknown if the firmware generally supports writing more than one property in one transaction. """ if self._transport is None: _LOGGER.warning( "%s: Dropping property commit because device is not connected: %s", self.name, prop, ) return root = aiobafi6_pb2.Root() # pylint: disable=no-member root.root2.commit.properties.CopyFrom(prop) _LOGGER.debug("%s: Sending commit:\n%s", self.name, root) self._transport.write(wireutils.serialize(root)) # Connection and query machinery def _sched_connect_or_reset(self): """Schedule a `_connect` invocation or reset the device to be run again. This function is the entrypoint of the internal run state machine. It is called when there is no connection (`_start`), when a connection could not be established (error or timeout) or the connection has been closed, or when the device is stopped (`_stop`).""" assert self._connect_timer is None assert self._connect_task is None assert self._query_timer is None assert self._transport is None assert self._protocol is None assert self._run_fut # If the run future is done, then reset it to None and return. # `_sched_connect_or_reset` is the entrypoint of the internal run state machine # and therefore this is the right place to make the device runnable again (by # clearing the run future). 
if self._run_fut.done(): _LOGGER.debug("%s: Resetting device for new run.", self.name) self._run_fut = None return _LOGGER.debug("%s: Scheduling next connect invocation.", self.name) self._connect_timer = self._loop.call_at( self._next_connect_ts, self._connect, ) def _connect(self) -> None: self._connect_timer = None self._next_connect_ts = time.monotonic() + self._delay_between_connects_seconds _LOGGER.debug( "%s: Connecting to %s:%s.", self.name, self._service.ip_addresses[0], self._service.port, ) connect_task = asyncio.create_task( self._loop.create_connection( lambda: Protocol(self), self._service.ip_addresses[0], self._service.port, ) ) connect_task.add_done_callback(self._finish_connect) self._loop.call_later(self._delay_between_connects_seconds, connect_task.cancel) self._connect_task = connect_task def _finish_connect(self, task: asyncio.Task) -> None: assert self._connect_task is task self._connect_task = None try: transport, protocol = task.result() _LOGGER.debug( "%s: Connected to %s.", self.name, transport.get_extra_info("peername") ) self._transport = transport self._protocol = protocol self._loop.call_soon(self._query) except (OSError, asyncio.CancelledError) as err: _LOGGER.debug("%s: Connection failed: %s", self.name, err) self._sched_connect_or_reset() def _handle_connection_lost(self, exc: t.Optional[Exception]) -> None: _LOGGER.debug("%s: Connection lost: %s", self.name, exc) if self._query_timer is not None: self._query_timer.cancel() self._query_timer = None self._transport = None self._protocol = None self._sched_connect_or_reset() def _process_message(self, data: bytes) -> None: root = aiobafi6_pb2.Root() # pylint: disable=no-member root.ParseFromString(data) _LOGGER.debug("%s: Received message: %s", self.name, root) # Discard unknown fields because `MergeFrom` treats them as repeated. root.DiscardUnknownFields() # type: ignore previous = self.properties_proto for prop in root.root2.query_result.properties: self._properties.MergeFrom(prop) if not self.available: self._maybe_set_available() current = self.properties_proto if self._ignore_volatile_props: _clear_volatile_props(previous) _clear_volatile_props(current) if self.available and current != previous: self._dispatch_callbacks() def _maybe_set_available(self): """Set the device as available if all required properties are set.""" for pname in _PROPS_REQUIRED_FOR_AVAILABLE: if not self._properties.HasField(pname): return if self._service.uuid is not None and self._service.uuid != self.dns_sd_uuid: _LOGGER.error( "%s: Device UUID (%s) does not match service UUID (%s): stopping.", self.name, self.dns_sd_uuid, self._service.uuid, ) assert self._run_fut if not self._run_fut.done(): self._run_fut.set_exception(DeviceUUIDMismatchError) return _LOGGER.debug("%s: Setting device as available.", self.name) self._available_fut.set_result(True) def _query(self) -> None: self._query_timer = None # The first `_query` of a connection is scheduled with `call_soon` and can't # be cancelled, so it's possible (though unlikely) for `_transport` to be None. # If that's the case, just bail out. 
if self._transport is None: return root = aiobafi6_pb2.Root() # pylint: disable=no-member root.root2.query.property_query = aiobafi6_pb2.ALL # pylint: disable=no-member _LOGGER.debug("%s: Sending query:\n%s", self.name, root) self._transport.write(wireutils.serialize(root)) if self._query_interval_seconds > 0: self._query_timer = self._loop.call_later( self._query_interval_seconds, self._query ) def async_run(self) -> asyncio.Future: """Run the device asynchronously. A running `Device` schedules functions on the run loop to maintain a connection to the device, sends periodic property queries, and services query commits. Returns a future that will resolve when the device stops. Cancelling this future will stop the device. """ if self._run_fut is None: self._start() assert self._run_fut is not None return self._run_fut def _start(self): """Start the device. This function schedules the device to connect on the next run loop iteration. From there on, the device will continue scheduling functions to maintain the connection, send periodic property queries, and service query commits. """ assert self._run_fut is None _LOGGER.debug("%s: Starting.", self.name) self._run_fut = self._loop.create_future() def stop_on_done(_: asyncio.Future): self._stop() self._run_fut.add_done_callback(stop_on_done) self._sched_connect_or_reset() def _stop(self) -> None: """Stop the device. This function ultimately causes `_sched_connect_or_reset` to be called by cancelling the appropriate in-flight task. This function also creates a new available future, thus marking the device as unavailable. If the run future became done because of an exception or because it was cancelled, that is propagated to the prior available future. Otherwise, the prior available future is never signalled.""" _LOGGER.debug("%s: Stopping.", self.name) # Propagate run exception or cancellation to the available future, then reset it # to set the device as unavailable. assert self._run_fut if self._run_fut.cancelled(): self._available_fut.cancel() else: run_exc = self._run_fut.exception() if run_exc is not None: self._available_fut.set_exception(run_exc) self._available_fut = self._loop.create_future() # Dispatch client callbacks, since some clients may observe the `available` # property through a callback. self._dispatch_callbacks() # If there is an active connection, close it. if self._transport is not None: self._transport.close() # Otherwise, if the device is opening a connection, cancel that. elif self._connect_task is not None: self._connect_task.cancel() # Otherwise, if `_connect` is scheduled, cancel that and call # `_sched_connect_or_reset` directly because nothing else will. 
elif self._connect_timer is not None: self._connect_timer.cancel() self._connect_timer = None self._sched_connect_or_reset() # Availability @property def available(self) -> bool: """Return True when device is running and has values for critical properties.""" available_fut = self._available_fut return available_fut.done() and not available_fut.exception() async def async_wait_available(self) -> None: """Asynchronously wait for the device to be available.""" await self._available_fut # General @property def name(self) -> str: # pylint: disable=missing-function-docstring if len(self._properties.name) > 0: return self._properties.name if ( self._service.service_name is not None and len(self._service.service_name) > 0 ): return self._service.service_name if len(self._properties.mac_address) > 0: return self._properties.mac_address return self._service.ip_addresses[0] @property def model(self) -> t.Optional[str]: # pylint: disable=missing-function-docstring if len(self._properties.model) > 0: return self._properties.model return self._service.model firmware_version = ProtoProp[t.Optional[str]]() mac_address = ProtoProp[t.Optional[str]]() # API @property def dns_sd_uuid( # pylint: disable=missing-function-docstring self, ) -> t.Optional[str]: if len(self._properties.dns_sd_uuid) > 0: return self._properties.dns_sd_uuid return self._service.uuid @property def api_version( # pylint: disable=missing-function-docstring self, ) -> t.Optional[str]: if len(self._properties.api_version) > 0: return self._properties.api_version return self._service.api_version @property def has_fan(self) -> bool: # pylint: disable=missing-function-docstring # TODO(#1): Support light-only devices. return True @property def has_light( # pylint: disable=missing-function-docstring self, ) -> t.Optional[bool]: return maybe_proto_field(self._properties.capabilities, "has_light") @property def has_auto_comfort(self) -> bool: # pylint: disable=missing-function-docstring # https://github.com/home-assistant/core/issues/72934 hc1 = maybe_proto_field(self._properties.capabilities, "has_comfort1") or False hc3 = maybe_proto_field(self._properties.capabilities, "has_comfort3") or False return hc1 and hc3 @property def has_occupancy(self) -> bool: # pylint: disable=missing-function-docstring try: api_version = int(self.api_version or 0) except ValueError: api_version = 0 # There is probably a capability flag for this but it is unknown. Speculatively, # a device that supports auto comfort is assumed to support occupancy.
return api_version >= OCCUPANCY_MIN_API_VERSION and self.has_auto_comfort # Fan # pylint: disable=unnecessary-lambda fan_mode = ProtoProp[OffOnAuto](writable=True, from_proto=lambda v: OffOnAuto(v)) reverse_enable = ProtoProp[bool](writable=True) speed_percent = ProtoProp[int]() speed = ProtoProp[int]( writable=True, to_proto=ClosedIntervalValidator[int](0, _MAX_SPEED) ) whoosh_enable = ProtoProp[bool](writable=True) eco_enable = ProtoProp[bool](writable=True) auto_comfort_enable = ProtoProp[bool](writable=True) comfort_ideal_temperature = ProtoProp[float]( writable=True, to_proto=to_proto_temperature, from_proto=from_proto_temperature, ) comfort_heat_assist_enable = ProtoProp[bool](writable=True) comfort_heat_assist_speed = ProtoProp[int](writable=True) comfort_heat_assist_reverse_enable = ProtoProp[bool](writable=True) comfort_min_speed = ProtoProp[int]( writable=True, to_proto=ClosedIntervalValidator[int](0, _MAX_SPEED) ) comfort_max_speed = ProtoProp[int]( writable=True, to_proto=ClosedIntervalValidator[int](0, _MAX_SPEED) ) motion_sense_enable = ProtoProp[bool](writable=True) motion_sense_timeout = ProtoProp[int](writable=True) return_to_auto_enable = ProtoProp[bool](writable=True) return_to_auto_timeout = ProtoProp[int](writable=True) target_rpm = ProtoProp[int]() current_rpm = ProtoProp[int]() fan_occupancy_detected = ProtoProp[bool](min_api_version=OCCUPANCY_MIN_API_VERSION) # Light # pylint: disable=unnecessary-lambda light_mode = ProtoProp[OffOnAuto](writable=True, from_proto=lambda v: OffOnAuto(v)) light_brightness_percent = ProtoProp[int](writable=True) light_brightness_level = ProtoProp[int](writable=True) light_color_temperature = ProtoProp[int](writable=True) light_dim_to_warm_enable = ProtoProp[bool](writable=True) light_auto_motion_timeout = ProtoProp[int](writable=True) light_return_to_auto_enable = ProtoProp[bool](writable=True) light_return_to_auto_timeout = ProtoProp[int](writable=True) light_warmest_color_temperature = ProtoProp[int]() light_coolest_color_temperature = ProtoProp[int]() light_occupancy_detected = ProtoProp[bool]( min_api_version=OCCUPANCY_MIN_API_VERSION ) # Sensors temperature = ProtoProp[float]( to_proto=to_proto_temperature, from_proto=from_proto_temperature, ) humidity = ProtoProp[int](from_proto=from_proto_humidity) # Connectivity @property def ip_address(self) -> str: # pylint: disable=missing-function-docstring if len(self._properties.ip_address) > 0: return self._properties.ip_address return self._service.ip_addresses[0] @property def wifi_ssid( # pylint: disable=missing-function-docstring self, ) -> t.Optional[str]: return maybe_proto_field(self._properties.wifi, "ssid") # More led_indicators_enable = ProtoProp[bool](writable=True) fan_beep_enable = ProtoProp[bool](writable=True) legacy_ir_remote_enable = ProtoProp[bool](writable=True) class Protocol(asyncio.Protocol): """AsyncIO Protocol for BAF i6.""" __slots__ = ("_device", "_transport", "_buffer") def __init__(self, device: Device): self._device = device self._transport: t.Optional[asyncio.Transport] = None self._buffer = bytearray() def connection_made(self, transport: asyncio.Transport) -> None: self._transport = transport def connection_lost(self, exc: t.Optional[Exception]) -> None: self._device._handle_connection_lost(exc) # pylint: disable=protected-access self._transport = None def data_received(self, data: bytes) -> None: assert self._transport is not None if len(self._buffer) + len(data) > _RECV_BUFFER_LIMIT: raise RuntimeError("Exceeded buffering limit.") 
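        # Note on framing: the i6 wire format delimits each serialized protobuf
        # message with 0xC0 bytes and applies byte stuffing ("emulation
        # prevention") to payload bytes that would otherwise look like the
        # delimiter. The loop below extracts complete frames from the buffer and
        # undoes the stuffing before handing the payload to the device.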
self._buffer.extend(data) while len(self._buffer) > 1: if self._buffer[0] != 0xC0: _LOGGER.error("Receive buffer does not start with sync byte.") self._transport.abort() break end = self._buffer.find(0xC0, 1) if end == -1: break if end == 1: _LOGGER.error("Empty message found in receive buffer.") self._transport.abort() break self._device._process_message( # pylint: disable=protected-access wireutils.remove_emulation_prevention(self._buffer[1:end]) ) self._buffer = self._buffer[end + 1 :] jfroy-aiobafi6-311c9f2/aiobafi6/device_test.py000066400000000000000000000124621447142365600212240ustar00rootroot00000000000000# pylint: disable=protected-access, missing-class-docstring, missing-function-docstring, invalid-name """Tests for device.""" from __future__ import annotations import asyncio import typing as t import pytest from .device import Device from .discovery import PORT, Service from .exceptions import DeviceUUIDMismatchError from .proto import aiobafi6_pb2 @pytest.mark.asyncio async def test_device_init_copies_service(): s = Service(("127.0.0.1",), PORT) d = Device(s) assert s == d._service s.ip_addresses = ("127.0.0.2",) assert d._service.ip_addresses == ("127.0.0.1",) @pytest.mark.asyncio async def test_service_property_copies(): d = Device(Service(("127.0.0.1",), PORT)) s = d.service assert s == d._service s.ip_addresses = ("127.0.0.2",) assert d._service.ip_addresses == ("127.0.0.1",) @pytest.mark.asyncio async def test_service_property_read_only(): d = Device(Service(("127.0.0.1",), PORT)) with pytest.raises(AttributeError): d.service = Service(("127.0.0.2",), PORT) # type: ignore @pytest.mark.asyncio async def test_has_auto_comfort(): d = Device(Service(("127.0.0.1",), PORT)) assert not d.has_auto_comfort d._properties.capabilities.has_comfort1 = False assert not d.has_auto_comfort d._properties.capabilities.ClearField("has_comfort1") d._properties.capabilities.has_comfort3 = False assert not d.has_auto_comfort d._properties.capabilities.has_comfort1 = False d._properties.capabilities.has_comfort3 = False assert not d.has_auto_comfort d._properties.capabilities.ClearField("has_comfort3") d._properties.capabilities.has_comfort1 = True assert not d.has_auto_comfort d._properties.capabilities.ClearField("has_comfort1") d._properties.capabilities.has_comfort3 = True assert not d.has_auto_comfort d._properties.capabilities.has_comfort1 = True d._properties.capabilities.has_comfort3 = True assert d.has_auto_comfort @pytest.mark.asyncio async def test_no_redundant_callback(): d = Device(Service(("127.0.0.1",), PORT)) d._available_fut.set_result(True) called = False def callback(_: Device): nonlocal called called = True d.add_callback(callback) root = aiobafi6_pb2.Root() # pylint: disable=no-member prop = root.root2.query_result.properties.add() prop.speed = 1 buf = root.SerializeToString() d._process_message(buf) assert d._properties.speed == 1 assert called called = False d._process_message(buf) assert not called @pytest.mark.asyncio async def test_ignore_volatile_props(): d = Device(Service(("127.0.0.1",), PORT), ignore_volatile_props=True) d._available_fut.set_result(True) called = False def callback(_: Device): nonlocal called called = True d.add_callback(callback) root = aiobafi6_pb2.Root() # pylint: disable=no-member prop = root.root2.query_result.properties.add() prop.current_rpm = 42 buf = root.SerializeToString() d._process_message(buf) assert d._properties.current_rpm == 42 assert not called @pytest.mark.asyncio async def test_no_ignore_volatile_props(): d = 
Device(Service(("127.0.0.1",), PORT), ignore_volatile_props=False) d._available_fut.set_result(True) called = False def callback(_: Device): nonlocal called called = True d.add_callback(callback) root = aiobafi6_pb2.Root() # pylint: disable=no-member prop = root.root2.query_result.properties.add() prop.current_rpm = 42 buf = root.SerializeToString() d._process_message(buf) assert d._properties.current_rpm == 42 assert called @pytest.mark.asyncio async def test_cancel_between_connect_attempt(): d = Device( Service(("127.0.0.1",), PORT), ignore_volatile_props=False, delay_between_connects_seconds=1, ) except_context: t.Optional[dict[str, t.Any]] = None def exception_handler(_: t.Any, context: dict[str, t.Any]) -> None: nonlocal except_context except_context = context def cancel_fut(fut: asyncio.Future): fut.cancel() run_fut = d.async_run() loop = run_fut.get_loop() loop.set_exception_handler(exception_handler) loop.call_later(1.5, cancel_fut, run_fut) with pytest.raises(asyncio.CancelledError): await run_fut assert except_context is None run_fut = d.async_run() loop.call_later(1.5, cancel_fut, run_fut) with pytest.raises(asyncio.CancelledError): await d.async_wait_available() @pytest.mark.asyncio async def test_uuid_mismatch(): d = Device( Service(("127.0.0.1",), PORT, uuid="A"), ignore_volatile_props=False, delay_between_connects_seconds=1, ) run_fut = d.async_run() avail_fut = d.async_wait_available() root = aiobafi6_pb2.Root() # pylint: disable=no-member prop = root.root2.query_result.properties.add() prop.name = "name" prop.model = "model" prop.firmware_version = "firmware_version" prop.mac_address = "mac_address" prop.dns_sd_uuid = "B" prop.capabilities.SetInParent() prop.ip_address = "ip_address" buf = root.SerializeToString() d._process_message(buf) with pytest.raises(DeviceUUIDMismatchError): await run_fut with pytest.raises(DeviceUUIDMismatchError): await avail_fut jfroy-aiobafi6-311c9f2/aiobafi6/discovery.py000066400000000000000000000135631447142365600207400ustar00rootroot00000000000000"""aiobafi6 discovery. Provides functionality to discover BAF i6 API services. """ from __future__ import annotations import asyncio import inspect import logging from dataclasses import dataclass from typing import Callable, Dict, Optional, Sequence, Set, Tuple from zeroconf import ServiceStateChange, Zeroconf from zeroconf.asyncio import AsyncServiceBrowser, AsyncServiceInfo from .const import MIN_API_VERSION __all__ = ("PORT", "ZEROCONF_SERVICE_TYPE", "Service", "ServiceBrowser") _LOGGER = logging.getLogger(__name__) """Default service API port number. Only use to manually create a `Service` object.""" PORT = 31415 """Zeroconf service type for BAF API DNS service discovery.""" ZEROCONF_SERVICE_TYPE = "_api._tcp.local." @dataclass class Service: """Represents a BAF i6 API service. A service is uniquely identified by a device UUID and provides a device name, model name, and API endpoints (address:port). A Device object can be created using a Service object. 
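
    For example, a service can be constructed manually when the device address
    is already known (illustrative only; the address is a placeholder):

        from aiobafi6 import PORT, Service

        service = Service(ip_addresses=["192.168.1.2"], port=PORT)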
""" ip_addresses: Tuple[str] port: int uuid: Optional[str] = None service_name: Optional[str] = None device_name: Optional[str] = None model: Optional[str] = None api_version: Optional[str] = None def __init__( self, ip_addresses: Sequence[str], port: int, uuid: Optional[str] = None, service_name: Optional[str] = None, device_name: Optional[str] = None, model: Optional[str] = None, api_version: Optional[str] = None, ): self.ip_addresses = tuple(ip for ip in ip_addresses) self.port = port self.uuid = uuid self.service_name = service_name self.device_name = device_name self.model = model self.api_version = api_version class ServiceBrowser: """Discovers BAF i6 API services. This class manages a `AsyncServiceBrowser` bound to a provided `Zeroconf` object to discover BAF i6 API services. The browser will call `callback` with a tuple of `Service` objects whenever the browser detects a change in service availability. """ def __init__(self, zconf: Zeroconf, callback: Callable): self._callback = callback # Map device UUID to Service object. When a device is renamed, the service # record with the old name won't be removed until a TTL expires, so the # service/device name is not a good key. self._service_map: Dict[str, Service] = {} # Set of outstanding tasks spawned from the service browser. self._tasks: Set[asyncio.Task] = set() self._asb = AsyncServiceBrowser( zconf, ["_api._tcp.local."], handlers=[self._on_state_change] ) def _dispatch_callback(self) -> None: services = tuple(s for s in self._service_map.values()) if inspect.iscoroutinefunction(self._callback): task = asyncio.create_task(self._callback(services)) self._tasks.add(task) task.add_done_callback(self._tasks.remove) else: self._callback(services) async def _async_resolve_service( self, zeroconf: Zeroconf, service_type: str, service_name: str ) -> None: info = AsyncServiceInfo(service_type, service_name) if not await info.async_request(zeroconf, 3000): _LOGGER.info("Failed to resolve service %s.", service_name) return if info.properties is None: _LOGGER.info("Service %s has no properties.", service_name) return if len(info.addresses) == 0: _LOGGER.info("Service %s has no addresses.", service_name) return if info.port is None: _LOGGER.info("Service %s has no port.", service_name) return try: api_version = info.properties[b"api version"].decode("utf-8") api_version_int = int(api_version) model = info.properties[b"model"].decode("utf-8") uuid = info.properties[b"uuid"].decode("utf-8") device_name = info.properties[b"name"].decode("utf-8") except (ValueError, KeyError) as err: _LOGGER.info( "Failed to parse service properties for %s: %s\n%s", service_name, err, info.properties, ) return if api_version_int < MIN_API_VERSION: _LOGGER.info( "Ignoring service %s because api_version is < %d: %s", service_name, MIN_API_VERSION, api_version, ) return _LOGGER.info( "Resolved service %s: device_name=`%s`, model=`%s`, uuid=%s, " " api_version=%s, ip_addresses=%s, port=%s", service_name, device_name, model, uuid, api_version, info.parsed_scoped_addresses(), info.port, ) service = Service( info.parsed_scoped_addresses(), info.port, uuid, service_name, device_name, model, api_version, ) self._service_map[uuid] = service self._dispatch_callback() def _on_state_change( self, zeroconf: Zeroconf, service_type: str, name: str, state_change: ServiceStateChange, ) -> None: _LOGGER.info("Service %s state changed: %s", name, state_change) if state_change == ServiceStateChange.Removed: for k in tuple(self._service_map.keys()): if self._service_map[k].service_name 
== name: del self._service_map[k] self._dispatch_callback() else: task = asyncio.create_task( self._async_resolve_service(zeroconf, service_type, name) ) self._tasks.add(task) task.add_done_callback(self._tasks.remove) jfroy-aiobafi6-311c9f2/aiobafi6/exceptions.py000066400000000000000000000003471447142365600211060ustar00rootroot00000000000000"""Exceptions.""" from __future__ import annotations class Error(Exception): """Base class for aiobafi6 errors.""" class DeviceUUIDMismatchError(Error): """Raised if init service UUID does not match the device UUID.""" jfroy-aiobafi6-311c9f2/aiobafi6/proto/000077500000000000000000000000001447142365600175125ustar00rootroot00000000000000jfroy-aiobafi6-311c9f2/aiobafi6/proto/aiobafi6_pb2.py000066400000000000000000000132511447142365600223110ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: proto/aiobafi6.proto """Generated protocol buffer code.""" from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x14proto/aiobafi6.proto\x12\x08\x61iobafi6\"&\n\x04Root\x12\x1e\n\x05root2\x18\x02 \x01(\x0b\x32\x0f.aiobafi6.Root2\"v\n\x05Root2\x12 \n\x06\x63ommit\x18\x02 \x01(\x0b\x32\x10.aiobafi6.Commit\x12\x1e\n\x05query\x18\x03 \x01(\x0b\x32\x0f.aiobafi6.Query\x12+\n\x0cquery_result\x18\x04 \x01(\x0b\x32\x15.aiobafi6.QueryResult\"2\n\x06\x43ommit\x12(\n\nproperties\x18\x03 \x01(\x0b\x32\x14.aiobafi6.Properties\"7\n\x05Query\x12.\n\x0eproperty_query\x18\x01 \x01(\x0e\x32\x16.aiobafi6.ProperyQuery\"^\n\x0bQueryResult\x12(\n\nproperties\x18\x02 \x03(\x0b\x32\x14.aiobafi6.Properties\x12%\n\tschedules\x18\x03 \x03(\x0b\x32\x12.aiobafi6.Schedule\"\x97\x0c\n\nProperties\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05model\x18\x02 \x01(\t\x12\x16\n\x0elocal_datetime\x18\x04 \x01(\t\x12\x14\n\x0cutc_datetime\x18\x05 \x01(\t\x12\x18\n\x10\x66irmware_version\x18\x07 \x01(\t\x12\x13\n\x0bmac_address\x18\x08 \x01(\t\x12\r\n\x05uuid9\x18\t \x01(\t\x12\x13\n\x0b\x64ns_sd_uuid\x18\n \x01(\t\x12\x14\n\x0c\x61pi_endpoint\x18\x0b \x01(\t\x12\x13\n\x0b\x61pi_version\x18\r \x01(\t\x12.\n\x08\x66irmware\x18\x10 \x01(\x0b\x32\x1c.aiobafi6.FirmwareProperties\x12,\n\x0c\x63\x61pabilities\x18\x11 \x01(\x0b\x32\x16.aiobafi6.Capabilities\x12%\n\x08\x66\x61n_mode\x18+ \x01(\x0e\x32\x13.aiobafi6.OffOnAuto\x12\x16\n\x0ereverse_enable\x18, \x01(\x08\x12\x15\n\rspeed_percent\x18- \x01(\x05\x12\r\n\x05speed\x18. \x01(\x05\x12\x15\n\rwhoosh_enable\x18: \x01(\x08\x12\x12\n\neco_enable\x18\x41 \x01(\x08\x12\x1b\n\x13\x61uto_comfort_enable\x18/ \x01(\x08\x12!\n\x19\x63omfort_ideal_temperature\x18\x30 \x01(\x05\x12\"\n\x1a\x63omfort_heat_assist_enable\x18< \x01(\x08\x12!\n\x19\x63omfort_heat_assist_speed\x18= \x01(\x05\x12*\n\"comfort_heat_assist_reverse_enable\x18> \x01(\x08\x12\x19\n\x11\x63omfort_min_speed\x18\x32 \x01(\x05\x12\x19\n\x11\x63omfort_max_speed\x18\x33 \x01(\x05\x12\x1b\n\x13motion_sense_enable\x18\x34 \x01(\x08\x12\x1c\n\x14motion_sense_timeout\x18\x35 \x01(\x05\x12\x1d\n\x15return_to_auto_enable\x18\x36 \x01(\x08\x12\x1e\n\x16return_to_auto_timeout\x18\x37 \x01(\x05\x12\x12\n\ntarget_rpm\x18? 
\x01(\x05\x12\x13\n\x0b\x63urrent_rpm\x18@ \x01(\x05\x12\x1e\n\x16\x66\x61n_occupancy_detected\x18\x42 \x01(\x08\x12\'\n\nlight_mode\x18\x44 \x01(\x0e\x32\x13.aiobafi6.OffOnAuto\x12 \n\x18light_brightness_percent\x18\x45 \x01(\x05\x12\x1e\n\x16light_brightness_level\x18\x46 \x01(\x05\x12\x1f\n\x17light_color_temperature\x18G \x01(\x05\x12 \n\x18light_dim_to_warm_enable\x18M \x01(\x08\x12!\n\x19light_auto_motion_timeout\x18I \x01(\x05\x12#\n\x1blight_return_to_auto_enable\x18J \x01(\x08\x12$\n\x1clight_return_to_auto_timeout\x18K \x01(\x05\x12\'\n\x1flight_warmest_color_temperature\x18N \x01(\x05\x12\'\n\x1flight_coolest_color_temperature\x18O \x01(\x05\x12 \n\x18light_occupancy_detected\x18U \x01(\x08\x12\x13\n\x0btemperature\x18V \x01(\x05\x12\x10\n\x08humidity\x18W \x01(\x05\x12\x12\n\nip_address\x18x \x01(\t\x12&\n\x04wifi\x18| \x01(\x0b\x32\x18.aiobafi6.WifiProperties\x12\x1e\n\x15led_indicators_enable\x18\x86\x01 \x01(\x08\x12\x18\n\x0f\x66\x61n_beep_enable\x18\x87\x01 \x01(\x08\x12 \n\x17legacy_ir_remote_enable\x18\x88\x01 \x01(\x08\x12\x36\n\x0fremote_firmware\x18\x98\x01 \x01(\x0b\x32\x1c.aiobafi6.FirmwareProperties\x12\x1f\n\x05stats\x18\x9c\x01 \x01(\x0b\x32\x0f.aiobafi6.Stats\"_\n\x12\x46irmwareProperties\x12\x18\n\x10\x66irmware_version\x18\x02 \x01(\t\x12\x1a\n\x12\x62ootloader_version\x18\x03 \x01(\t\x12\x13\n\x0bmac_address\x18\x04 \x01(\t\"M\n\x0c\x43\x61pabilities\x12\x14\n\x0chas_comfort1\x18\x01 \x01(\x08\x12\x14\n\x0chas_comfort3\x18\x03 \x01(\x08\x12\x11\n\thas_light\x18\x04 \x01(\x08\"\x1e\n\x0eWifiProperties\x12\x0c\n\x04ssid\x18\x01 \x01(\t\"\n\n\x08Schedule\"\x1f\n\x05Stats\x12\x16\n\x0euptime_minutes\x18\x01 \x01(\x05*t\n\x0cProperyQuery\x12\x07\n\x03\x41LL\x10\x00\x12\x07\n\x03\x46\x41N\x10\x01\x12\t\n\x05LIGHT\x10\x02\x12\x1e\n\x1a\x46IRMWARE_MORE_DATETIME_API\x10\x03\x12\x0b\n\x07NETWORK\x10\x04\x12\r\n\tSCHEDULES\x10\x05\x12\x0b\n\x07SENSORS\x10\x06*&\n\tOffOnAuto\x12\x07\n\x03OFF\x10\x00\x12\x06\n\x02ON\x10\x01\x12\x08\n\x04\x41UTO\x10\x02') _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'proto.aiobafi6_pb2', globals()) if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None _PROPERYQUERY._serialized_start=2214 _PROPERYQUERY._serialized_end=2330 _OFFONAUTO._serialized_start=2332 _OFFONAUTO._serialized_end=2370 _ROOT._serialized_start=34 _ROOT._serialized_end=72 _ROOT2._serialized_start=74 _ROOT2._serialized_end=192 _COMMIT._serialized_start=194 _COMMIT._serialized_end=244 _QUERY._serialized_start=246 _QUERY._serialized_end=301 _QUERYRESULT._serialized_start=303 _QUERYRESULT._serialized_end=397 _PROPERTIES._serialized_start=400 _PROPERTIES._serialized_end=1959 _FIRMWAREPROPERTIES._serialized_start=1961 _FIRMWAREPROPERTIES._serialized_end=2056 _CAPABILITIES._serialized_start=2058 _CAPABILITIES._serialized_end=2135 _WIFIPROPERTIES._serialized_start=2137 _WIFIPROPERTIES._serialized_end=2167 _SCHEDULE._serialized_start=2169 _SCHEDULE._serialized_end=2179 _STATS._serialized_start=2181 _STATS._serialized_end=2212 # @@protoc_insertion_point(module_scope) jfroy-aiobafi6-311c9f2/aiobafi6/proto/aiobafi6_pb2.pyi000066400000000000000000000256431447142365600224720ustar00rootroot00000000000000from google.protobuf.internal import containers as _containers from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from typing import ClassVar as _ClassVar, Iterable as 
_Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union ALL: ProperyQuery AUTO: OffOnAuto DESCRIPTOR: _descriptor.FileDescriptor FAN: ProperyQuery FIRMWARE_MORE_DATETIME_API: ProperyQuery LIGHT: ProperyQuery NETWORK: ProperyQuery OFF: OffOnAuto ON: OffOnAuto SCHEDULES: ProperyQuery SENSORS: ProperyQuery class Capabilities(_message.Message): __slots__ = ["has_comfort1", "has_comfort3", "has_light"] HAS_COMFORT1_FIELD_NUMBER: _ClassVar[int] HAS_COMFORT3_FIELD_NUMBER: _ClassVar[int] HAS_LIGHT_FIELD_NUMBER: _ClassVar[int] has_comfort1: bool has_comfort3: bool has_light: bool def __init__(self, has_comfort1: bool = ..., has_comfort3: bool = ..., has_light: bool = ...) -> None: ... class Commit(_message.Message): __slots__ = ["properties"] PROPERTIES_FIELD_NUMBER: _ClassVar[int] properties: Properties def __init__(self, properties: _Optional[_Union[Properties, _Mapping]] = ...) -> None: ... class FirmwareProperties(_message.Message): __slots__ = ["bootloader_version", "firmware_version", "mac_address"] BOOTLOADER_VERSION_FIELD_NUMBER: _ClassVar[int] FIRMWARE_VERSION_FIELD_NUMBER: _ClassVar[int] MAC_ADDRESS_FIELD_NUMBER: _ClassVar[int] bootloader_version: str firmware_version: str mac_address: str def __init__(self, firmware_version: _Optional[str] = ..., bootloader_version: _Optional[str] = ..., mac_address: _Optional[str] = ...) -> None: ... class Properties(_message.Message): __slots__ = ["api_endpoint", "api_version", "auto_comfort_enable", "capabilities", "comfort_heat_assist_enable", "comfort_heat_assist_reverse_enable", "comfort_heat_assist_speed", "comfort_ideal_temperature", "comfort_max_speed", "comfort_min_speed", "current_rpm", "dns_sd_uuid", "eco_enable", "fan_beep_enable", "fan_mode", "fan_occupancy_detected", "firmware", "firmware_version", "humidity", "ip_address", "led_indicators_enable", "legacy_ir_remote_enable", "light_auto_motion_timeout", "light_brightness_level", "light_brightness_percent", "light_color_temperature", "light_coolest_color_temperature", "light_dim_to_warm_enable", "light_mode", "light_occupancy_detected", "light_return_to_auto_enable", "light_return_to_auto_timeout", "light_warmest_color_temperature", "local_datetime", "mac_address", "model", "motion_sense_enable", "motion_sense_timeout", "name", "remote_firmware", "return_to_auto_enable", "return_to_auto_timeout", "reverse_enable", "speed", "speed_percent", "stats", "target_rpm", "temperature", "utc_datetime", "uuid9", "whoosh_enable", "wifi"] API_ENDPOINT_FIELD_NUMBER: _ClassVar[int] API_VERSION_FIELD_NUMBER: _ClassVar[int] AUTO_COMFORT_ENABLE_FIELD_NUMBER: _ClassVar[int] CAPABILITIES_FIELD_NUMBER: _ClassVar[int] COMFORT_HEAT_ASSIST_ENABLE_FIELD_NUMBER: _ClassVar[int] COMFORT_HEAT_ASSIST_REVERSE_ENABLE_FIELD_NUMBER: _ClassVar[int] COMFORT_HEAT_ASSIST_SPEED_FIELD_NUMBER: _ClassVar[int] COMFORT_IDEAL_TEMPERATURE_FIELD_NUMBER: _ClassVar[int] COMFORT_MAX_SPEED_FIELD_NUMBER: _ClassVar[int] COMFORT_MIN_SPEED_FIELD_NUMBER: _ClassVar[int] CURRENT_RPM_FIELD_NUMBER: _ClassVar[int] DNS_SD_UUID_FIELD_NUMBER: _ClassVar[int] ECO_ENABLE_FIELD_NUMBER: _ClassVar[int] FAN_BEEP_ENABLE_FIELD_NUMBER: _ClassVar[int] FAN_MODE_FIELD_NUMBER: _ClassVar[int] FAN_OCCUPANCY_DETECTED_FIELD_NUMBER: _ClassVar[int] FIRMWARE_FIELD_NUMBER: _ClassVar[int] FIRMWARE_VERSION_FIELD_NUMBER: _ClassVar[int] HUMIDITY_FIELD_NUMBER: _ClassVar[int] IP_ADDRESS_FIELD_NUMBER: _ClassVar[int] LED_INDICATORS_ENABLE_FIELD_NUMBER: _ClassVar[int] LEGACY_IR_REMOTE_ENABLE_FIELD_NUMBER: _ClassVar[int] LIGHT_AUTO_MOTION_TIMEOUT_FIELD_NUMBER: 
_ClassVar[int] LIGHT_BRIGHTNESS_LEVEL_FIELD_NUMBER: _ClassVar[int] LIGHT_BRIGHTNESS_PERCENT_FIELD_NUMBER: _ClassVar[int] LIGHT_COLOR_TEMPERATURE_FIELD_NUMBER: _ClassVar[int] LIGHT_COOLEST_COLOR_TEMPERATURE_FIELD_NUMBER: _ClassVar[int] LIGHT_DIM_TO_WARM_ENABLE_FIELD_NUMBER: _ClassVar[int] LIGHT_MODE_FIELD_NUMBER: _ClassVar[int] LIGHT_OCCUPANCY_DETECTED_FIELD_NUMBER: _ClassVar[int] LIGHT_RETURN_TO_AUTO_ENABLE_FIELD_NUMBER: _ClassVar[int] LIGHT_RETURN_TO_AUTO_TIMEOUT_FIELD_NUMBER: _ClassVar[int] LIGHT_WARMEST_COLOR_TEMPERATURE_FIELD_NUMBER: _ClassVar[int] LOCAL_DATETIME_FIELD_NUMBER: _ClassVar[int] MAC_ADDRESS_FIELD_NUMBER: _ClassVar[int] MODEL_FIELD_NUMBER: _ClassVar[int] MOTION_SENSE_ENABLE_FIELD_NUMBER: _ClassVar[int] MOTION_SENSE_TIMEOUT_FIELD_NUMBER: _ClassVar[int] NAME_FIELD_NUMBER: _ClassVar[int] REMOTE_FIRMWARE_FIELD_NUMBER: _ClassVar[int] RETURN_TO_AUTO_ENABLE_FIELD_NUMBER: _ClassVar[int] RETURN_TO_AUTO_TIMEOUT_FIELD_NUMBER: _ClassVar[int] REVERSE_ENABLE_FIELD_NUMBER: _ClassVar[int] SPEED_FIELD_NUMBER: _ClassVar[int] SPEED_PERCENT_FIELD_NUMBER: _ClassVar[int] STATS_FIELD_NUMBER: _ClassVar[int] TARGET_RPM_FIELD_NUMBER: _ClassVar[int] TEMPERATURE_FIELD_NUMBER: _ClassVar[int] UTC_DATETIME_FIELD_NUMBER: _ClassVar[int] UUID9_FIELD_NUMBER: _ClassVar[int] WHOOSH_ENABLE_FIELD_NUMBER: _ClassVar[int] WIFI_FIELD_NUMBER: _ClassVar[int] api_endpoint: str api_version: str auto_comfort_enable: bool capabilities: Capabilities comfort_heat_assist_enable: bool comfort_heat_assist_reverse_enable: bool comfort_heat_assist_speed: int comfort_ideal_temperature: int comfort_max_speed: int comfort_min_speed: int current_rpm: int dns_sd_uuid: str eco_enable: bool fan_beep_enable: bool fan_mode: OffOnAuto fan_occupancy_detected: bool firmware: FirmwareProperties firmware_version: str humidity: int ip_address: str led_indicators_enable: bool legacy_ir_remote_enable: bool light_auto_motion_timeout: int light_brightness_level: int light_brightness_percent: int light_color_temperature: int light_coolest_color_temperature: int light_dim_to_warm_enable: bool light_mode: OffOnAuto light_occupancy_detected: bool light_return_to_auto_enable: bool light_return_to_auto_timeout: int light_warmest_color_temperature: int local_datetime: str mac_address: str model: str motion_sense_enable: bool motion_sense_timeout: int name: str remote_firmware: FirmwareProperties return_to_auto_enable: bool return_to_auto_timeout: int reverse_enable: bool speed: int speed_percent: int stats: Stats target_rpm: int temperature: int utc_datetime: str uuid9: str whoosh_enable: bool wifi: WifiProperties def __init__(self, name: _Optional[str] = ..., model: _Optional[str] = ..., local_datetime: _Optional[str] = ..., utc_datetime: _Optional[str] = ..., firmware_version: _Optional[str] = ..., mac_address: _Optional[str] = ..., uuid9: _Optional[str] = ..., dns_sd_uuid: _Optional[str] = ..., api_endpoint: _Optional[str] = ..., api_version: _Optional[str] = ..., firmware: _Optional[_Union[FirmwareProperties, _Mapping]] = ..., capabilities: _Optional[_Union[Capabilities, _Mapping]] = ..., fan_mode: _Optional[_Union[OffOnAuto, str]] = ..., reverse_enable: bool = ..., speed_percent: _Optional[int] = ..., speed: _Optional[int] = ..., whoosh_enable: bool = ..., eco_enable: bool = ..., auto_comfort_enable: bool = ..., comfort_ideal_temperature: _Optional[int] = ..., comfort_heat_assist_enable: bool = ..., comfort_heat_assist_speed: _Optional[int] = ..., comfort_heat_assist_reverse_enable: bool = ..., comfort_min_speed: _Optional[int] = ..., 
comfort_max_speed: _Optional[int] = ..., motion_sense_enable: bool = ..., motion_sense_timeout: _Optional[int] = ..., return_to_auto_enable: bool = ..., return_to_auto_timeout: _Optional[int] = ..., target_rpm: _Optional[int] = ..., current_rpm: _Optional[int] = ..., fan_occupancy_detected: bool = ..., light_mode: _Optional[_Union[OffOnAuto, str]] = ..., light_brightness_percent: _Optional[int] = ..., light_brightness_level: _Optional[int] = ..., light_color_temperature: _Optional[int] = ..., light_dim_to_warm_enable: bool = ..., light_auto_motion_timeout: _Optional[int] = ..., light_return_to_auto_enable: bool = ..., light_return_to_auto_timeout: _Optional[int] = ..., light_warmest_color_temperature: _Optional[int] = ..., light_coolest_color_temperature: _Optional[int] = ..., light_occupancy_detected: bool = ..., temperature: _Optional[int] = ..., humidity: _Optional[int] = ..., ip_address: _Optional[str] = ..., wifi: _Optional[_Union[WifiProperties, _Mapping]] = ..., led_indicators_enable: bool = ..., fan_beep_enable: bool = ..., legacy_ir_remote_enable: bool = ..., remote_firmware: _Optional[_Union[FirmwareProperties, _Mapping]] = ..., stats: _Optional[_Union[Stats, _Mapping]] = ...) -> None: ... class Query(_message.Message): __slots__ = ["property_query"] PROPERTY_QUERY_FIELD_NUMBER: _ClassVar[int] property_query: ProperyQuery def __init__(self, property_query: _Optional[_Union[ProperyQuery, str]] = ...) -> None: ... class QueryResult(_message.Message): __slots__ = ["properties", "schedules"] PROPERTIES_FIELD_NUMBER: _ClassVar[int] SCHEDULES_FIELD_NUMBER: _ClassVar[int] properties: _containers.RepeatedCompositeFieldContainer[Properties] schedules: _containers.RepeatedCompositeFieldContainer[Schedule] def __init__(self, properties: _Optional[_Iterable[_Union[Properties, _Mapping]]] = ..., schedules: _Optional[_Iterable[_Union[Schedule, _Mapping]]] = ...) -> None: ... class Root(_message.Message): __slots__ = ["root2"] ROOT2_FIELD_NUMBER: _ClassVar[int] root2: Root2 def __init__(self, root2: _Optional[_Union[Root2, _Mapping]] = ...) -> None: ... class Root2(_message.Message): __slots__ = ["commit", "query", "query_result"] COMMIT_FIELD_NUMBER: _ClassVar[int] QUERY_FIELD_NUMBER: _ClassVar[int] QUERY_RESULT_FIELD_NUMBER: _ClassVar[int] commit: Commit query: Query query_result: QueryResult def __init__(self, commit: _Optional[_Union[Commit, _Mapping]] = ..., query: _Optional[_Union[Query, _Mapping]] = ..., query_result: _Optional[_Union[QueryResult, _Mapping]] = ...) -> None: ... class Schedule(_message.Message): __slots__ = [] def __init__(self) -> None: ... class Stats(_message.Message): __slots__ = ["uptime_minutes"] UPTIME_MINUTES_FIELD_NUMBER: _ClassVar[int] uptime_minutes: int def __init__(self, uptime_minutes: _Optional[int] = ...) -> None: ... class WifiProperties(_message.Message): __slots__ = ["ssid"] SSID_FIELD_NUMBER: _ClassVar[int] ssid: str def __init__(self, ssid: _Optional[str] = ...) -> None: ... class ProperyQuery(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): __slots__ = [] class OffOnAuto(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): __slots__ = [] jfroy-aiobafi6-311c9f2/aiobafi6/protoprop.py000066400000000000000000000075541447142365600210000ustar00rootroot00000000000000"""aiobafi6 protobuf properties. Descriptor for `Device` attributes backed by a `Properties` protobuf field. 
""" from __future__ import annotations import typing as t from enum import IntEnum from google.protobuf.message import Message from .const import MIN_API_VERSION from .proto import aiobafi6_pb2 __all__ = ( "ProtoProp", "OffOnAuto", "ClosedIntervalValidator", "maybe_proto_field", "to_proto_temperature", "from_proto_temperature", "from_proto_humidity", ) T = t.TypeVar("T") TLT = t.TypeVar("TLT", bound="SupportsLessThan") class ProtoProp(t.Generic[T]): """Descriptor for `Device` properties backed by a protobuf field. The descriptor has a public `min_api_version` that clients aware of ProtoProp can use to check if a particular property is supported by a given `Device` based on its `api_version` property. """ __slots__ = ( "_writable", "_field_name", "_to_proto", "_from_proto", "_name", "min_api_version", ) def __init__( self, writable: bool = False, field_name: t.Optional[str] = None, to_proto: t.Optional[t.Callable[[T], t.Any]] = None, from_proto: t.Optional[t.Callable[[t.Any], t.Optional[T]]] = None, min_api_version: int = MIN_API_VERSION, ): self._name = None self._writable = writable self._field_name = field_name self.min_api_version = min_api_version def ident(val: t.Any) -> T: return t.cast(T, val) if to_proto is None: to_proto = ident self._to_proto = to_proto if from_proto is None: from_proto = ident self._from_proto = from_proto def __set_name__(self, owner: type[object], name: str) -> None: self._name = name if self._field_name is None: self._field_name = name def __get__(self, obj: t.Any, objtype: type[object]) -> t.Optional[T]: val = t.cast(t.Optional[T], obj._maybe_property(self._name)) if val is None: return val return self._from_proto(val) def __set__(self, obj: t.Any, value: T): if not self._writable: raise AttributeError(f"can't set attribute {self._name}") props = aiobafi6_pb2.Properties() # pylint: disable=no-member setattr(props, t.cast(str, self._field_name), self._to_proto(value)) obj._commit_property(props) class OffOnAuto(IntEnum): """Tri-state mode enum that matches the protocol buffer.""" OFF = 0 ON = 1 AUTO = 2 def maybe_proto_field(message: Message, field: str) -> t.Optional[t.Any]: """Returns the value of `field` in `message` or `None` if not set.""" return getattr(message, field) if message.HasField(field) else None class ClosedIntervalValidator(t.Generic[TLT]): # pylint: disable=invalid-name """Callable that checks if an input value is within the closed interval [a, b].""" __slots__ = ("a", "b") def __init__(self, a: TLT, b: TLT): self.a = a self.b = b def __call__(self, val: TLT) -> TLT: if val < self.a: raise ValueError(f"value must be inside [{self.a}, {self.b}]") if self.b < val: raise ValueError(f"value must be inside [{self.a}, {self.b}]") return val def to_proto_temperature(val: float) -> int: """Return val multiplied by 100 as an int.""" return int(val * 100.0) def from_proto_temperature(val: int) -> float: """Return val divided by 100 as a float.""" return float(val) / 100.0 def from_proto_humidity(val: int) -> t.Optional[int]: """Return val if it is in the [0, 100] interval, otherwise None.""" if val < 0 or val > 100: return None return val class SupportsLessThan(t.Protocol): # pylint: disable=missing-class-docstring def __lt__(self, __other: t.Any) -> bool: ... 
jfroy-aiobafi6-311c9f2/aiobafi6/protoprop_test.py000066400000000000000000000054221447142365600220270ustar00rootroot00000000000000# pylint: disable=protected-access, missing-class-docstring, missing-function-docstring, invalid-name, too-few-public-methods """Tests for protoprop.""" import typing as t import pytest from google.protobuf.message import Message from .const import MIN_API_VERSION from .proto import aiobafi6_pb2 from .protoprop import ( ClosedIntervalValidator, OffOnAuto, ProtoProp, from_proto_humidity, from_proto_temperature, maybe_proto_field, to_proto_temperature, ) class FakeDevice: """A class that implements the interface expected by ProtoProp.""" def __init__(self): self.properties = aiobafi6_pb2.Properties() # pylint: disable=no-member def _maybe_property(self, field: str) -> t.Optional[t.Any]: return maybe_proto_field(t.cast(Message, self.properties), field) def _commit_property( self, p: aiobafi6_pb2.Properties # pylint: disable=no-member ) -> None: self.properties.MergeFrom(p) def test_off_on_auto(): class D(FakeDevice): fan_mode = ProtoProp[OffOnAuto]( writable=True, from_proto=lambda v: OffOnAuto(v), # pylint: disable=unnecessary-lambda ) d = D() d.properties.fan_mode = aiobafi6_pb2.AUTO # pylint: disable=no-member assert d.fan_mode == OffOnAuto.AUTO d.fan_mode = OffOnAuto.AUTO with pytest.raises(ValueError): d.properties.fan_mode = t.cast( aiobafi6_pb2.OffOnAuto, 3 # pylint: disable=no-member ) def test_temperature(): class D(FakeDevice): temperature = ProtoProp[float]( writable=True, to_proto=to_proto_temperature, from_proto=from_proto_temperature, ) d = D() d.properties.temperature = 2250 assert d.temperature == 22.5 d.temperature = 23.5 assert d.properties.temperature == 2350 def test_humidity(): class D(FakeDevice): humidity = ProtoProp[int](from_proto=from_proto_humidity) d = D() for i in range(100): d.properties.humidity = i assert d.humidity == i d.properties.humidity = -1 assert d.humidity is None d.properties.humidity = 101 assert d.humidity is None d.properties.humidity = 1000 assert d.humidity is None def test_closed_interval(): class D(FakeDevice): speed = ProtoProp[int]( writable=True, to_proto=ClosedIntervalValidator[int](0, 0) ) d = D() d.speed = 0 with pytest.raises(ValueError): d.speed = -1 with pytest.raises(ValueError): d.speed = 1 def test_min_api_version(): class D(FakeDevice): prop1 = ProtoProp[int]() prop2 = ProtoProp[int](min_api_version=5) assert vars(D)["prop1"].min_api_version == MIN_API_VERSION assert vars(D)["prop2"].min_api_version == 5 jfroy-aiobafi6-311c9f2/aiobafi6/wireutils.py000066400000000000000000000027371447142365600207610ustar00rootroot00000000000000"""Utilities for BAF message encoding and decoding. BAF uses SLIP (Serial Line IP, https://datatracker.ietf.org/doc/html/rfc1055.html) to frame protocol buffer messages on a TCP/IP stream connection. 
""" from __future__ import annotations from google.protobuf.message import Message def serialize(message: Message) -> bytes: """Serialize `message` to bytes and put it in a SLIP frame.""" buf = bytearray([0xC0]) buf.extend(add_emulation_prevention(message.SerializeToString())) buf.append(0xC0) return buf def add_emulation_prevention(buf: bytes) -> bytes: """Add emulation prevention sequences (SLIP ESC).""" obuf = bytearray() for val in buf: if val == 0xC0: obuf.extend((0xDB, 0xDC)) elif val == 0xDB: obuf.extend((0xDB, 0xDD)) else: obuf.append(val) return bytes(obuf) def remove_emulation_prevention(buf: bytes) -> bytes: """Remove emulation prevention sequences (SLIP ESC).""" obuf = bytearray() eps = False for val in buf: if val == 0xDB: eps = True elif eps: if val == 0xDC: obuf.append(0xC0) elif val == 0xDD: obuf.append(0xDB) else: raise ValueError("invalid emulation prevention sequence") eps = False else: obuf.append(val) if eps: raise ValueError("truncated emulation prevention sequence") return bytes(obuf) jfroy-aiobafi6-311c9f2/aiobafi6/wireutils_test.py000066400000000000000000000025201447142365600220060ustar00rootroot00000000000000# pylint: disable=protected-access, missing-class-docstring, missing-function-docstring, invalid-name, line-too-long """Tests for wireutils.""" from . import wireutils def test_add_emulation_prevention(): got = wireutils.add_emulation_prevention(b"\xc0") assert got == b"\xdb\xdc" got = wireutils.add_emulation_prevention(b"\xdb") assert got == b"\xdb\xdd" got = wireutils.add_emulation_prevention( b'\x12+")\x12\r\n\x0bLiving Room\x12\x04\x18\xc0\xbe\x01\x12\x03\xb0\x08\x00\x12\x03\xb8\x08\x00\x12\x03\xc0\x08\x01\x12\x03\xb0\t\x00' ) assert ( got == b'\x12+")\x12\r\n\x0bLiving Room\x12\x04\x18\xdb\xdc\xbe\x01\x12\x03\xb0\x08\x00\x12\x03\xb8\x08\x00\x12\x03\xdb\xdc\x08\x01\x12\x03\xb0\t\x00' ) def test_remove_emulation_prevention(): got = wireutils.remove_emulation_prevention(b"\xdb\xdc") assert got == b"\xc0" got = wireutils.remove_emulation_prevention(b"\xdb\xdd") assert got == b"\xdb" got = wireutils.remove_emulation_prevention( b'\x12+")\x12\r\n\x0bLiving Room\x12\x04\x18\xdb\xdc\xbe\x01\x12\x03\xb0\x08\x00\x12\x03\xb8\x08\x00\x12\x03\xdb\xdc\x08\x01\x12\x03\xb0\t\x00' ) assert ( got == b'\x12+")\x12\r\n\x0bLiving Room\x12\x04\x18\xc0\xbe\x01\x12\x03\xb0\x08\x00\x12\x03\xb8\x08\x00\x12\x03\xc0\x08\x01\x12\x03\xb0\t\x00' ) jfroy-aiobafi6-311c9f2/poetry.lock000066400000000000000000001115571447142365600170750ustar00rootroot00000000000000# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "astroid" version = "2.15.6" description = "An abstract syntax tree for Python with inference support." 
optional = false python-versions = ">=3.7.2" files = [ {file = "astroid-2.15.6-py3-none-any.whl", hash = "sha256:389656ca57b6108f939cf5d2f9a2a825a3be50ba9d589670f393236e0a03b91c"}, {file = "astroid-2.15.6.tar.gz", hash = "sha256:903f024859b7c7687d7a7f3a3f73b17301f8e42dfd9cc9df9d4418172d3e2dbd"}, ] [package.dependencies] lazy-object-proxy = ">=1.4.0" typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} wrapt = [ {version = ">=1.11,<2", markers = "python_version < \"3.11\""}, {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}, ] [[package]] name = "async-timeout" version = "4.0.2" description = "Timeout context manager for asyncio programs" optional = false python-versions = ">=3.6" files = [ {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, ] [[package]] name = "black" version = "23.7.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, {file = "black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = 
"sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, ] [package.dependencies] click = ">=8.0.0" mypy-extensions = ">=0.4.3" packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] d = ["aiohttp (>=3.7.4)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "click" version = "8.1.3" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, ] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] [[package]] name = "dill" version = "0.3.6" description = "serialize all of python" optional = false python-versions = ">=3.7" files = [ {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"}, {file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"}, ] [package.extras] graph = ["objgraph (>=1.7.2)"] [[package]] name = "exceptiongroup" version = "1.1.1" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, ] [package.extras] test = ["pytest (>=6)"] [[package]] name = "flake8" version = "6.1.0" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" files = [ {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, ] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" pycodestyle = ">=2.11.0,<2.12.0" pyflakes = ">=3.1.0,<3.2.0" [[package]] name = "ifaddr" version = "0.2.0" description = "Cross-platform network 
interface and IP address enumeration library" optional = false python-versions = "*" files = [ {file = "ifaddr-0.2.0-py3-none-any.whl", hash = "sha256:085e0305cfe6f16ab12d72e2024030f5d52674afad6911bb1eee207177b8a748"}, {file = "ifaddr-0.2.0.tar.gz", hash = "sha256:cc0cbfcaabf765d44595825fb96a99bb12c79716b73b44330ea38ee2b0c4aed4"}, ] [[package]] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] [[package]] name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.8.0" files = [ {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, ] [package.extras] colors = ["colorama (>=0.4.3)"] pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] plugins = ["setuptools"] requirements-deprecated-finder = ["pip-api", "pipreqs"] [[package]] name = "lazy-object-proxy" version = "1.9.0" description = "A fast and thorough lazy object proxy." optional = false python-versions = ">=3.7" files = [ {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"}, {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"}, {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"}, {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"}, {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"}, {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"}, {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"}, {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"}, {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"}, {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"}, {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"}, {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"}, {file = 
"lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"}, {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"}, {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"}, {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"}, {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"}, {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"}, {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"}, {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"}, {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"}, {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"}, {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"}, {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"}, {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"}, {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"}, {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"}, {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"}, {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"}, {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"}, {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"}, {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"}, {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"}, {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"}, {file = 
"lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"}, {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, ] [[package]] name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" optional = false python-versions = ">=3.6" files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] [[package]] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." optional = false python-versions = ">=3.5" files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] [[package]] name = "packaging" version = "23.1" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, ] [[package]] name = "pastel" version = "0.2.1" description = "Bring colors to your terminal." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ {file = "pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364"}, {file = "pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d"}, ] [[package]] name = "pathspec" version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.7" files = [ {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, ] [[package]] name = "platformdirs" version = "3.8.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false python-versions = ">=3.7" files = [ {file = "platformdirs-3.8.0-py3-none-any.whl", hash = "sha256:ca9ed98ce73076ba72e092b23d3c93ea6c4e186b3f1c3dad6edd98ff6ffcca2e"}, {file = "platformdirs-3.8.0.tar.gz", hash = "sha256:b0cabcb11063d21a0b261d557acb0a9d2126350e63b70cdf7db6347baea456dc"}, ] [package.extras] docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" version = "1.2.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.7" files = [ {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, ] [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] [[package]] name = "poethepoet" version = "0.22.0" description = "A task runner that works well with poetry." optional = false python-versions = ">=3.8" files = [ {file = "poethepoet-0.22.0-py3-none-any.whl", hash = "sha256:f654e52c19b7c689d5293ab6a065787b21f125884c0b367650292df4f3cb508c"}, {file = "poethepoet-0.22.0.tar.gz", hash = "sha256:659d7678fd8b349bd40941e3de7d6d386171dab3e7c8babcdcd8ead288c9ea47"}, ] [package.dependencies] pastel = ">=0.2.1,<0.3.0" tomli = ">=1.2.2" [package.extras] poetry-plugin = ["poetry (>=1.0,<2.0)"] [[package]] name = "protobuf" version = "4.24.1" description = "" optional = false python-versions = ">=3.7" files = [ {file = "protobuf-4.24.1-cp310-abi3-win32.whl", hash = "sha256:d414199ca605eeb498adc4d2ba82aedc0379dca4a7c364ff9bc9a179aa28e71b"}, {file = "protobuf-4.24.1-cp310-abi3-win_amd64.whl", hash = "sha256:5906c5e79ff50fe38b2d49d37db5874e3c8010826f2362f79996d83128a8ed9b"}, {file = "protobuf-4.24.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:970c701ee16788d74f3de20938520d7a0aebc7e4fff37096a48804c80d2908cf"}, {file = "protobuf-4.24.1-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:fc361148e902949dcb953bbcb148c99fe8f8854291ad01107e4120361849fd0e"}, {file = "protobuf-4.24.1-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:5d32363d14aca6e5c9e9d5918ad8fb65b091b6df66740ae9de50ac3916055e43"}, {file = "protobuf-4.24.1-cp37-cp37m-win32.whl", hash = "sha256:df015c47d6855b8efa0b9be706c70bf7f050a4d5ac6d37fb043fbd95157a0e25"}, {file = "protobuf-4.24.1-cp37-cp37m-win_amd64.whl", hash = "sha256:d4af4fd9e9418e819be30f8df2a16e72fbad546a7576ac7f3653be92a6966d30"}, {file = "protobuf-4.24.1-cp38-cp38-win32.whl", hash = "sha256:302e8752c760549ed4c7a508abc86b25d46553c81989343782809e1a062a2ef9"}, {file = "protobuf-4.24.1-cp38-cp38-win_amd64.whl", hash = "sha256:06437f0d4bb0d5f29e3d392aba69600188d4be5ad1e0a3370e581a9bf75a3081"}, {file = "protobuf-4.24.1-cp39-cp39-win32.whl", hash = "sha256:0b2b224e9541fe9f046dd7317d05f08769c332b7e4c54d93c7f0f372dedb0b1a"}, {file = "protobuf-4.24.1-cp39-cp39-win_amd64.whl", hash = "sha256:bd39b9094a4cc003a1f911b847ab379f89059f478c0b611ba1215053e295132e"}, {file = "protobuf-4.24.1-py3-none-any.whl", hash = "sha256:55dd644adc27d2a624339332755fe077c7f26971045b469ebb9732a69ce1f2ca"}, {file = "protobuf-4.24.1.tar.gz", hash = "sha256:44837a5ed9c9418ad5d502f89f28ba102e9cd172b6668bc813f21716f9273348"}, ] [[package]] name = "pycodestyle" version = "2.11.0" description = "Python style guide checker" optional = 
false python-versions = ">=3.8" files = [ {file = "pycodestyle-2.11.0-py2.py3-none-any.whl", hash = "sha256:5d1013ba8dc7895b548be5afb05740ca82454fd899971563d2ef625d090326f8"}, {file = "pycodestyle-2.11.0.tar.gz", hash = "sha256:259bcc17857d8a8b3b4a2327324b79e5f020a13c16074670f9c8c8f872ea76d0"}, ] [[package]] name = "pyflakes" version = "3.1.0" description = "passive checker of Python programs" optional = false python-versions = ">=3.8" files = [ {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, ] [[package]] name = "pylint" version = "2.17.5" description = "python code static checker" optional = false python-versions = ">=3.7.2" files = [ {file = "pylint-2.17.5-py3-none-any.whl", hash = "sha256:73995fb8216d3bed149c8d51bba25b2c52a8251a2c8ac846ec668ce38fab5413"}, {file = "pylint-2.17.5.tar.gz", hash = "sha256:f7b601cbc06fef7e62a754e2b41294c2aa31f1cb659624b9a85bcba29eaf8252"}, ] [package.dependencies] astroid = ">=2.15.6,<=2.17.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, {version = ">=0.3.6", markers = "python_version >= \"3.11\""}, ] isort = ">=4.2.5,<6" mccabe = ">=0.6,<0.8" platformdirs = ">=2.2.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} tomlkit = ">=0.10.1" [package.extras] spelling = ["pyenchant (>=3.2,<4.0)"] testutils = ["gitpython (>3)"] [[package]] name = "pytest" version = "7.4.0" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, ] [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-asyncio" version = "0.21.1" description = "Pytest support for asyncio" optional = false python-versions = ">=3.7" files = [ {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, ] [package.dependencies] pytest = ">=7.0.0" [package.extras] docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] [[package]] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.7" files = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] [[package]] name = "tomlkit" version = "0.11.8" description = "Style preserving TOML library" 
optional = false python-versions = ">=3.7" files = [ {file = "tomlkit-0.11.8-py3-none-any.whl", hash = "sha256:8c726c4c202bdb148667835f68d68780b9a003a9ec34167b6c673b38eff2a171"}, {file = "tomlkit-0.11.8.tar.gz", hash = "sha256:9330fc7faa1db67b541b28e62018c17d20be733177d290a13b24c62d1614e0c3"}, ] [[package]] name = "typing-extensions" version = "4.6.3" description = "Backported and Experimental Type Hints for Python 3.7+" optional = false python-versions = ">=3.7" files = [ {file = "typing_extensions-4.6.3-py3-none-any.whl", hash = "sha256:88a4153d8505aabbb4e13aacb7c486c2b4a33ca3b3f807914a9b4c844c471c26"}, {file = "typing_extensions-4.6.3.tar.gz", hash = "sha256:d91d5919357fe7f681a9f2b5b4cb2a5f1ef0a1e9f59c4d8ff0d3491e05c0ffd5"}, ] [[package]] name = "wrapt" version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"}, {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"}, {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"}, {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"}, {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"}, {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"}, {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"}, {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"}, {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"}, {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"}, {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"}, {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"}, {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"}, {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"}, {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"}, {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"}, {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = 
"sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"}, {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"}, {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"}, {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"}, {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"}, {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"}, {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"}, {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"}, {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"}, {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"}, {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"}, {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"}, {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"}, {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"}, {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"}, {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"}, {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"}, {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"}, {file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"}, {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"}, {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"}, {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"}, {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"}, {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = 
"sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"}, {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"}, {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"}, {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"}, {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"}, {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"}, {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"}, {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"}, {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"}, {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"}, {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"}, {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"}, {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"}, {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"}, {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"}, {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"}, {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"}, {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"}, {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"}, {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"}, {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"}, {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"}, {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"}, {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"}, {file = 
"wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"}, {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"}, {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"}, {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"}, {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"}, {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"}, {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"}, {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"}, {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"}, {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"}, {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, ] [[package]] name = "zeroconf" version = "0.39.4" description = "Pure Python Multicast DNS Service Discovery Library (Bonjour/Avahi compatible)" optional = false python-versions = ">=3.7" files = [ {file = "zeroconf-0.39.4-py3-none-any.whl", hash = "sha256:d60eae9e9c99d1a168ce9ff9de7e7398c23754a0c2004ded230f8d529c5260a0"}, {file = "zeroconf-0.39.4.tar.gz", hash = "sha256:701e4d697f89fe952aa9c13a512ed6bf472dcf4f0a6d275e71085604b3882295"}, ] [package.dependencies] async-timeout = ">=4.0.1" ifaddr = ">=0.1.7" [metadata] lock-version = "2.0" python-versions = ">=3.10.0" content-hash = "e10407fada4f0f6c5b8ee64ca0bd5949c74a259277d70f1a09c852210d9acfbd" jfroy-aiobafi6-311c9f2/proto/000077500000000000000000000000001447142365600160325ustar00rootroot00000000000000jfroy-aiobafi6-311c9f2/proto/aiobafi6.proto000066400000000000000000000100131447142365600205720ustar00rootroot00000000000000syntax = "proto2"; package aiobafi6; message Root { optional Root2 root2 = 2; } message Root2 { optional Commit commit = 2; optional Query query = 3; optional QueryResult query_result = 4; } message Commit { optional Properties properties = 3; } enum ProperyQuery { ALL = 0; FAN = 1; LIGHT = 2; FIRMWARE_MORE_DATETIME_API = 3; NETWORK = 4; SCHEDULES = 5; SENSORS = 6; } message Query { optional ProperyQuery property_query = 1; } message QueryResult { repeated Properties properties = 2; repeated Schedule schedules = 3; } enum OffOnAuto { OFF = 0; ON = 1; AUTO = 2; } message Properties { // General optional string name = 1; optional string model = 2; optional string local_datetime = 4; // ISO 8601 optional string utc_datetime = 5; // ISO 8601 optional string firmware_version = 7; optional string mac_address = 8; // API optional string uuid9 = 9; optional string dns_sd_uuid = 10; // uuid used for _api._tcp dns service optional string api_endpoint = 11; optional string api_version = 13; optional FirmwareProperties firmware = 16; optional 

  // Fan
  optional OffOnAuto fan_mode = 43;
  optional bool reverse_enable = 44;
  optional int32 speed_percent = 45;  // 0-100
  optional int32 speed = 46;  // 0-7
  optional bool whoosh_enable = 58;
  optional bool eco_enable = 65;
  optional bool auto_comfort_enable = 47;
  optional int32 comfort_ideal_temperature = 48;  // divide by 100.0, in ºC
  optional bool comfort_heat_assist_enable = 60;
  optional int32 comfort_heat_assist_speed = 61;
  optional bool comfort_heat_assist_reverse_enable = 62;
  optional int32 comfort_min_speed = 50;  // 0 for no min
  optional int32 comfort_max_speed = 51;  // 7 for no max
  optional bool motion_sense_enable = 52;
  optional int32 motion_sense_timeout = 53;  // in s
  optional bool return_to_auto_enable = 54;
  optional int32 return_to_auto_timeout = 55;  // in s
  optional int32 target_rpm = 63;
  optional int32 current_rpm = 64;

  // Resets after about 4 minutes 30 seconds.
  // Speculatively placed in the fan category as it is returned with other
  // fan properties by the firmware. Arguably could be named "fast occupancy".
  optional bool fan_occupancy_detected = 66;  // `api_version` >= 5

  // Light
  optional OffOnAuto light_mode = 68;
  optional int32 light_brightness_percent = 69;  // 0-100
  optional int32 light_brightness_level = 70;  // 0-16
  optional int32 light_color_temperature = 71;
  optional bool light_dim_to_warm_enable = 77;
  optional int32 light_auto_motion_timeout = 73;
  optional bool light_return_to_auto_enable = 74;
  optional int32 light_return_to_auto_timeout = 75;
  optional int32 light_warmest_color_temperature = 78;
  optional int32 light_coolest_color_temperature = 79;

  // Resets after about 9 minutes 30 seconds.
  // Speculatively placed in the light category as it is returned with other
  // light properties by the firmware. Arguably could be named "slow occupancy".
  // And given its field ID, it's odd that it's not in the sensor category.
  optional bool light_occupancy_detected = 85;  // `api_version` >= 5

  // Sensors
  optional int32 temperature = 86;  // divide by 100.0, in ºC
  optional int32 humidity = 87;  // percent

  // Connectivity
  optional string ip_address = 120;
  optional WifiProperties wifi = 124;

  // More
  optional bool led_indicators_enable = 134;
  optional bool fan_beep_enable = 135;
  optional bool legacy_ir_remote_enable = 136;

  // Controls
  optional FirmwareProperties remote_firmware = 152;

  // Speculatively called stats because it contains uptime.
  optional Stats stats = 156;
}

message FirmwareProperties {
  optional string firmware_version = 2;
  optional string bootloader_version = 3;
  optional string mac_address = 4;
}

message Capabilities {
  optional bool has_comfort1 = 1;
  optional bool has_comfort3 = 3;
  optional bool has_light = 4;
}

message WifiProperties {
  optional string ssid = 1;
}

message Schedule {}

message Stats {
  optional int32 uptime_minutes = 1;
}
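The Query and Properties messages above describe the request/response shape of the protocol, including scaled values such as temperature ("divide by 100.0, in ºC"). As a rough illustration only, here is a minimal sketch of how a client might build an ALL-properties query and read a scaled sensor value with the Python classes protoc generates from this file. The import path aiobafi6.proto.aiobafi6_pb2 is inferred from the protoc invocation in pyproject.toml below and is an assumption, and the TCP transport/framing the device expects is not shown.

# Sketch only, not the library's actual client code.
# Assumes protoc has generated aiobafi6/proto/aiobafi6_pb2.py (see the
# [tool.poe.tasks.protoc] task in pyproject.toml); the module path below
# is therefore an assumption.
from aiobafi6.proto import aiobafi6_pb2 as pb


def build_all_query() -> bytes:
    """Serialize a Root message that asks for every property group (ALL)."""
    root = pb.Root()
    # Top-level proto2 enum values are exposed as module-level constants.
    root.root2.query.property_query = pb.ALL
    return root.SerializeToString()


def temperature_celsius(props: pb.Properties) -> float | None:
    """Apply the 'divide by 100.0' scaling documented on the temperature field."""
    if not props.HasField("temperature"):
        return None
    return props.temperature / 100.0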
jfroy-aiobafi6-311c9f2/pyproject.toml000066400000000000000000000024041447142365600176030ustar00rootroot00000000000000
[tool.poetry]
name = "aiobafi6"
version = "0.9.0"
description = "Big Ass Fans i6/Haiku protocol asynchronous Python library"
authors = ["Jean-Francois Roy "]
license = "Apache-2.0"
readme = "README.md"
homepage = "https://github.com/jfroy/aiobafi6"
repository = "https://github.com/jfroy/aiobafi6"
keywords = ["BigAssFans", "i6", "Haiku", "SenseME"]
classifiers = [
    "Framework :: AsyncIO",
    "Intended Audience :: Developers",
    "Topic :: Home Automation",
]

[tool.poetry.dependencies]
python = ">=3.10.0"
protobuf = ">=3.20"
zeroconf = ">=0.38"

[tool.poetry.dev-dependencies]
black = "^23.7.0"
isort = "^5.12.0"
poethepoet = "^0.22.0"
pytest = "^7.4.0"
pytest-asyncio = "^0.21"
flake8 = "^6.1.0"
pylint = "^2.17.5"

[tool.poetry.scripts]
aiobafi6 = 'aiobafi6.cmd.main:main'

[tool.poe.tasks.protoc]
cmd = "protoc --python_out=aiobafi6 --pyi_out=aiobafi6 proto/aiobafi6.proto"
help = "Generate Python protobuf files"

[tool.isort]
profile = "black"
src_paths = ["aiobafi6"]
skip_glob = ["aiobafi6/proto/*"]

[tool.pytest.ini_options]
asyncio_mode = "auto"

[tool.pylint.main]
py-version = "3.9"

[tool.pylint."messages control"]
disable = ["too-many-instance-attributes"]

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
jfroy-aiobafi6-311c9f2/setup.cfg000066400000000000000000000005551447142365600165150ustar00rootroot00000000000000
[flake8]
exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build,proto
doctests = True
# To work with Black
# E501: line too long
# W503: Line break occurred before a binary operator
# E203: Whitespace before ':'
# D202 No blank lines allowed after function docstring
# W504 line break after binary operator
ignore = E501, W503, E203, D202, W504
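The Properties message above notes that dns_sd_uuid is the "uuid used for _api._tcp dns service", and zeroconf is both pinned in the lock file and declared in pyproject.toml. Here is a minimal discovery sketch using that dependency; the full browse string "_api._tcp.local." and the listener details are assumptions based on that comment, not the library's own discovery code.

# Discovery sketch under stated assumptions; not aiobafi6's discovery code.
import time

from zeroconf import ServiceBrowser, ServiceListener, Zeroconf


class ApiServiceListener(ServiceListener):
    """Print devices advertising the (assumed) "_api._tcp.local." service."""

    def add_service(self, zc: Zeroconf, type_: str, name: str) -> None:
        info = zc.get_service_info(type_, name)
        if info is not None:
            print(f"found {name}: {info.parsed_addresses()} port {info.port}")

    def update_service(self, zc: Zeroconf, type_: str, name: str) -> None:
        pass

    def remove_service(self, zc: Zeroconf, type_: str, name: str) -> None:
        print(f"lost {name}")


if __name__ == "__main__":
    zc = Zeroconf()
    browser = ServiceBrowser(zc, "_api._tcp.local.", ApiServiceListener())
    try:
        time.sleep(10)  # browse briefly, then shut down
    finally:
        zc.close()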