aioinflux-0.9.0/.circleci/config.yml
---
version: 2
jobs:
build: &build
docker:
- image: circleci/python:3.7.2
steps:
- checkout
- run:
name: Install
command: sudo pip install -e .
test: &test
docker:
- image: circleci/python:3.7.2
- image: influxdb:latest
- image: redis:latest
steps:
- checkout
- run:
name: Install test extras
command: sudo pip install -e .[test]
- run:
name: Run test
command: make test
- run:
name: Install extras
command: sudo pip install -e .[pandas,cache]
- run:
name: Run test again (w/ Pandas+Redis support)
command: make test
- run:
name: See coverage report
command: cat .coverage
- run:
name: Upload coverage report to Codecov
command: bash <(curl -s https://codecov.io/bash)
build-3.6:
<<: *build
docker:
- image: circleci/python:3.6.8
test-3.6:
<<: *test
docker:
- image: circleci/python:3.6.8
- image: influxdb:latest
- image: redis:latest
workflows:
version: 2
build_and_test:
jobs:
- build
- test:
requires:
- build
build_and_test-3.6:
jobs:
- build-3.6
- test-3.6:
requires:
- build-3.6
aioinflux-0.9.0/.gitignore
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
README.html
# PyBuilder
target/
# IPython Notebook
.ipynb_checkpoints
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# dotenv
.env
# virtualenv
venv/
ENV/
# Spyder project settings
.spyderproject
# Rope project settings
.ropeproject
# Pycharm
.idea/
# macOS
.DS_Store
aioinflux-0.9.0/.readthedocs.yml
build:
image: latest
python:
version: 3.6
pip_install: true
extra_requirements:
- docs
aioinflux-0.9.0/CHANGELOG.md
# Changelog
## [0.9.0] - 2019-07-11
### Added
- Add support for custom path to InfluxDB (#24); see the sketch below
- Add support for `Decimal` serialization (812c1a8, 100d931)
- Add chunk count on chunked response debugging message (b9e85ad)
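A minimal sketch of the new custom-path option (`example.com` and `/influxdb/` are hypothetical values for an instance behind a reverse proxy):

```python
from aioinflux import InfluxDBClient

client = InfluxDBClient(host='example.com', path='/influxdb/', db='mydb')
```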
### Changed
- Refactor `rm_none` option implementation (5735b51, 13062ed, 89bae37)
- Make enum typevars more strict (f177212)
## [0.8.0] - 2019-05-10
This version is backwards compatible with v0.7.x
### Added
- Add dataframe support for chunked queries (e3c2a0b)
## [0.7.1] - 2019-04-11
This version is backwards compatible with v0.7.0
### Fixed
- Don't cache error responses (be7b87c)
### Docs
- Minor wording changes
### Internal
- Minor internal changes
## [0.7.0] - 2019-03-22
This version is mostly backwards compatible with v0.6.x
(w/ the exception of query patterns functionality)
### Added
- Redis-based caching functionality. See the
[docs](https://aioinflux.readthedocs.io/en/latest/usage.html#caching-query-results) for details.
- Timeout functionality (#21 by @SuminAndrew)
### Changed
- Move `ClientSession` creation logic outside `__init__`.
It is now easier to use advanced ``aiohttp.ClientSession`` options.
See the [docs](https://aioinflux.readthedocs.io/en/latest/usage.html#other-aiohttp-functionality) for details.
### Removed
- Query patterns functionality
### Internal
- Refactor test suite
- Various other internal changes
## [0.6.1] - 2019-02-01
This version is backwards compatible with v0.6.0
### Fixed
- Type annotation error in Python 3.6 (febfe47)
- Suppress `The object should be created from async function` warning from aiohttp 3.5 (da950e9)
## [0.6.0] - 2019-02-01
### Added
- Support serializing NaN integers in pandas 0.24+
(See [blog post](https://pandas-dev.github.io/pandas-blog/pandas-extension-arrays.html)) (1c55217)
- Support for using `namedtuple` with `iterpoints` (bd93c53)
### Changed
- **[BREAKING]** Changed signature of `parser` argument of `iterpoints`
  from `(x, meta)` to `(*x, meta)` (bd93c53); see the sketch below
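A before/after sketch of the `parser` signature change (the dict-building parser is illustrative):

```python
# v0.5.x: the point arrived as a single sequence
def parser(x, meta):
    return dict(zip(meta['columns'], x))

# v0.6.0+: point values are unpacked as positional arguments
def parser(*x, meta):
    return dict(zip(meta['columns'], x))
```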
### Removed
- **[BREAKING]** Removed iterable mode and `InfluxDBResult` / `InfluxDBChunkedResult`.
Use `iterpoints` instead. (592c5ed)
- Deprecated `set_query_pattern` (1d36b07)
### Docs
- Various improvements (8c6cbd3, ce46596, b7db169, ba3edae)
## [0.5.1] - 2019-01-21
This version is backwards compatible with v0.5.0
### Fixed
- Fix type annotations
- Fix internal API inconsistencies
### Docs
- Complete API section
- Add proper Sphinx links
- Update/fix various sections
## [0.5.0] - 2019-01-17
### Changed
- **[BREAKING]** Removed `DataPoint` functionality in favor of the simpler and more
  flexible `@lineprotocol` decorator (see the sketch below). See the
  [docs](https://aioinflux.readthedocs.io/en/latest/usage.html#writing-user-defined-class-objects) for details.
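A minimal sketch of the decorator-based approach (class and attribute names are illustrative):

```python
from typing import NamedTuple
from aioinflux import lineprotocol, TIMEINT, TAG, FLOAT

@lineprotocol
class Trade(NamedTuple):
    timestamp: TIMEINT
    instrument: TAG
    price: FLOAT

Trade(1546300800000000000, 'AAPL', 154.9).to_lineprotocol()
# b'Trade,instrument=AAPL price=154.9 1546300800000000000'
```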
### Docs
- Added detailed `@lineprotocol` usage
## [0.4.1] - 2018-11-22
### Fixed
- Fixed bug with multi-statement queries when using `dataframe` mode
### Docs
- Added note regarding handling of multi-statement/multi-series queries when using `dataframe` mode
## [0.4.0] - 2018-10-22
### Added
- Added ability to write `datapoint` objects. See the
[docs](https://aioinflux.readthedocs.io/en/latest/usage.html#writing-datapoint-objects) for details.
- Added `bytes` output format. This is to facilitate the addition of a caching layer on top of InfluxDB. (cb4e3d1)
### Changed
- Change `write` method signature to match the `/write` endpoint docs
- Allow writing to non-default retention policy (#14)
- (`precision` is not fully implemented yet)
- Renamed `raw` output format to `json`. Most users should be unaffected by this. (cb4e3d1)
### Fixed
- Improved docs
### Internal
- Refactored serialization/parsing functionality into a subpackage
- Fix test warnings (2e42d50)
## [0.3.4] - 2018-09-03
- Fixed `output='dataframe'` parsing bug (#15)
- Removed tag column -> categorical dtype conversion functionality
- Moved documentation to Read The Docs
- Added two query patterns (671013b)
- Added this CHANGELOG
## [0.3.3] - 2018-06-23
- Python 3.7 support
- Sphinx-based documentation hosted at Read the Docs
- Minor dataframe serialization debugging (364190fa)
## [0.3.2] - 2018-05-03
- Fix parsing bug for string ending in a backslash (db8846e)
- Add InfluxDBWriteError exception class (d8d0a01)
- Make InfluxDBClient.db attribute optional (039e088)
## [0.3.1] - 2018-04-29
- Fix bug where timezone-unaware datetime input was assumed to be in local time (#11 / a8c81b7)
- Minor improvement in dataframe parsing (1e33b92)
## [0.3.0] - 2018-04-24
### Highlights:
- Drop Pandas/Numpy requirement (#9)
- Improved iteration support (816a722)
- Implement tag/key value caching (9a65787)
- Improve dataframe serialization
- Speed improvements (ddc9ecc)
- Memory usage improvements (a2b58bd)
- Disable concatenating of dataframes of the same measurement when grouping by tag (331a0c9)
- Queries now return tag columns with `pd.Categorical` dtype (efdea98)
- Writes now automatically identify `pd.Categorical` dtype columns as tag columns (ddc9ecc)
### API changes:
- `mode` attribute was "split" into `mode` and `output`.
Default behavior remains the same (async / raw).
- Iteration is now made easier through the `iterable` mode
and `InfluxDBResult` and `InfluxDBChunkedResult` classes
## [0.2.0] - 2018-03-20
### Highlights
- Documentation is now complete
- Improved iteration support (via `iter_resp`) (cfffbf5)
- Allow users to add custom query patterns
- Add support for positional arguments in query patterns
- Reimplement `__del__` (40d0a69 / #7)
- Improve/debug dataframe parsing (7beeb53 / 96d78a4)
- Improve write error message (7972946) (by @miracle2k)
### API changes:
- Rename `AsyncInfluxDBClient` to `InfluxDBClient` (54d98c9)
- Change return format of chunked responses (related: cfffbf5 / #6)
- Make some `__init__` arguments keyword-only (5d2edf6)
## [0.1.2] - 2018-02-28
- Add `__aenter__`/`__aexit__` support (5736446) (by @Kargathia)
- Add HTTPS URL support (49b8e89) (by @miracle2k)
- Add Unix socket support (8a8b069) (by @carlos-jenkins)
- Fix bug where tags were not being added to DataFrames when querying (a9f1d82)
## [0.1.1] - 2017-11-10
- Add error handling for chunked responses (db93c20)
- Fix DataFrame tag parsing bug (aa02faa)
- Fix boolean field parsing bug (4c2bff9)
- Increase test coverage
## [0.1.0] - 2017-10-04
Initial release.
The API is relatively stable but there might be some bugs here and there.
Discretion advised when using in production.
aioinflux-0.9.0/LICENSE
MIT License
Copyright (c) 2017 Gustavo Bezerra
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
aioinflux-0.9.0/Makefile
test:
flake8
pytest --verbose --cov=aioinflux --cov-append --cov-report html --cov-report term tests/
cov: test
open htmlcov/index.html
clean:
rm -rf build dist *.egg-info docs/_build/*
rm -rf .cache htmlcov .coverage .pytest_cache
rm -f .DS_Store README.html
.PHONY: docs
docs:
rm -rf docs/_build/*
cd docs && $(MAKE) html
build: clean test docs
python setup.py sdist bdist_wheel
upload: build
twine upload dist/*
upload-test: build
twine upload --repository testpypi dist/*
readme:
rst2html.py --stylesheet=docs/_static/rst2html.css README.rst README.html
aioinflux-0.9.0/README.rst
aioinflux
=========
.. image:: https://img.shields.io/circleci/project/github/gusutabopb/aioinflux/master.svg
:target: https://circleci.com/gh/gusutabopb/aioinflux
:alt: CI status
.. image:: https://img.shields.io/codecov/c/github/gusutabopb/aioinflux.svg
:target: https://codecov.io/gh/gusutabopb/aioinflux
:alt: Coverage
.. image:: https://img.shields.io/pypi/v/aioinflux.svg
:target: https://pypi.python.org/pypi/aioinflux
:alt: PyPI package
.. image:: https://img.shields.io/pypi/pyversions/aioinflux.svg
:target: https://pypi.python.org/pypi/aioinflux
:alt: Supported Python versions
.. image:: https://readthedocs.org/projects/aioinflux/badge/?version=stable
:target: https://aioinflux.readthedocs.io/en/stable/?badge=stable
:alt: Documentation status
Asynchronous Python client for `InfluxDB`_. Built on top of
`aiohttp`_ and `asyncio`_.
Aioinflux is an alternative to the official InfluxDB Python client.
Aioinflux supports interacting with InfluxDB in a non-blocking way by using `aiohttp`_.
It also supports writing and querying of `Pandas`_ dataframes,
among other handy functionality.
.. _Pandas: http://pandas.pydata.org/
.. _InfluxDB: http://influxdata.com/
.. _asyncio: https://docs.python.org/3/library/asyncio.html
.. _aiohttp: https://github.com/aio-libs/aiohttp
Please refer to the `documentation`_ for more details.
Installation
------------
Python 3.6+ is required.
You also need to have access to a running instance of InfluxDB.
.. code:: bash
pip install aioinflux
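Optional Pandas and caching support can be pulled in through extras
(the same ones exercised in the CI configuration):

.. code:: bash

    pip install aioinflux[pandas,cache]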
Quick start
-----------
This sums up most of what you can do with ``aioinflux``:
.. code:: python
import asyncio
from aioinflux import InfluxDBClient
point = {
'time': '2009-11-10T23:00:00Z',
'measurement': 'cpu_load_short',
'tags': {'host': 'server01',
'region': 'us-west'},
'fields': {'value': 0.64}
}
async def main():
async with InfluxDBClient(db='testdb') as client:
await client.create_database(db='testdb')
await client.write(point)
resp = await client.query('SELECT value FROM cpu_load_short')
print(resp)
asyncio.get_event_loop().run_until_complete(main())
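Querying into a Pandas dataframe only requires changing the client's output
format (a minimal sketch, assuming the ``pandas`` extra is installed):

.. code:: python

    async def main_df():
        async with InfluxDBClient(db='testdb', output='dataframe') as client:
            df = await client.query('SELECT value FROM cpu_load_short')
            print(df)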
See the `documentation`_ for more detailed usage.
.. _documentation: http://aioinflux.readthedocs.io/en/stable/
aioinflux-0.9.0/aioinflux/__init__.py
# flake8: noqa
from . import serialization
from .client import InfluxDBClient, InfluxDBError, InfluxDBWriteError
from .iterutils import iterpoints
from .serialization.usertype import *
__version__ = '0.9.0'
aioinflux-0.9.0/aioinflux/client.py
import asyncio
import json
import logging
import warnings
from functools import wraps
from typing import TypeVar, Union, AnyStr, Mapping, Iterable, Optional, AsyncGenerator
import aiohttp
from . import serialization
from .compat import *
if pd:
PointType = TypeVar('PointType', Mapping, dict, bytes, pd.DataFrame)
ResultType = TypeVar('ResultType', dict, bytes, pd.DataFrame)
else:
PointType = TypeVar('PointType', Mapping, dict, bytes)
ResultType = TypeVar('ResultType', dict, bytes)
# Aioinflux uses logging mainly for debugging purposes.
# Please attach your own handlers if you need logging.
logger = logging.getLogger('aioinflux')
def runner(coro):
"""Function execution decorator."""
@wraps(coro)
def inner(self, *args, **kwargs):
if self.mode == 'async':
return coro(self, *args, **kwargs)
return self._loop.run_until_complete(coro(self, *args, **kwargs))
return inner
class InfluxDBError(Exception):
"""Raised when an server-side error occurs"""
pass
class InfluxDBWriteError(InfluxDBError):
"""Raised when a server-side writing error occurs"""
def __init__(self, resp):
self.status = resp.status
self.headers = resp.headers
self.reason = resp.reason
super().__init__(f'Error writing data ({self.status} - {self.reason}): '
f'{self.headers.get("X-Influxdb-Error", "")}')
class InfluxDBClient:
def __init__(
self,
host: str = 'localhost',
port: int = 8086,
path: str = '/',
mode: str = 'async',
output: str = 'json',
db: Optional[str] = None,
database: Optional[str] = None,
ssl: bool = False,
*,
unix_socket: Optional[str] = None,
username: Optional[str] = None,
password: Optional[str] = None,
timeout: Optional[Union[aiohttp.ClientTimeout, float]] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
redis_opts: Optional[dict] = None,
cache_expiry: int = 86400,
**kwargs
):
"""
:class:`~aioinflux.client.InfluxDBClient` holds information necessary
to interact with InfluxDB.
It is async by default, but can also be used as a sync/blocking client.
        When querying, responses are returned as parsed JSON by default,
        but can also be parsed into Pandas DataFrames
        (see the ``output`` parameter below).
        The three main public methods correspond to the three endpoints of the InfluxDB API, namely:
1. :meth:`~.InfluxDBClient.ping`
2. :meth:`~.InfluxDBClient.write`
3. :meth:`~.InfluxDBClient.query`
See each of the above methods documentation for further usage details.
See also: https://docs.influxdata.com/influxdb/latest/tools/api/
:param host: Hostname to connect to InfluxDB.
:param port: Port to connect to InfluxDB.
:param path: Path to connect to InfluxDB.
:param mode: Mode in which client should run. Available options:
           - ``async``: Default mode. Each query/request to the backend will
             return a coroutine that must be awaited.
- ``blocking``: Behaves in sync/blocking fashion,
similar to the official InfluxDB-Python client.
:param output: Output format of the response received from InfluxDB.
- ``json``: Default format.
Returns parsed JSON as received from InfluxDB.
           - ``dataframe``: Parses results into :py:class:`pandas.DataFrame`.
Not compatible with chunked responses.
:param db: Default database to be used by the client.
:param ssl: If https should be used.
:param unix_socket: Path to the InfluxDB Unix domain socket.
:param username: Username to use to connect to InfluxDB.
:param password: User password.
:param timeout: Timeout in seconds or :class:`aiohttp.ClientTimeout` object
:param database: Default database to be used by the client.
This field is for argument consistency with the official InfluxDB Python client.
:param loop: Asyncio event loop.
        :param redis_opts: Dict of keyword arguments for :func:`aioredis.create_redis`
:param cache_expiry: Expiry time (in seconds) for cached data
:param kwargs: Additional kwargs for :class:`aiohttp.ClientSession`
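        Usage sketch (``mydb`` is a hypothetical database name)::

            client = InfluxDBClient(db='mydb', mode='blocking')
            client.ping()  # blocking mode: returns directly, no ``await`` needed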
"""
self._loop = loop or asyncio.get_event_loop()
self._session: aiohttp.ClientSession = None
self._redis: aioredis.Redis = None
self._mode = None
self._output = None
self._db = None
self.ssl = ssl
self.host = host
self.port = port
self.path = path
self.mode = mode
self.output = output
self.db = database or db
# ClientSession configuration
if username:
kwargs.update(auth=aiohttp.BasicAuth(username, password))
if unix_socket:
kwargs.update(connector=aiohttp.UnixConnector(unix_socket, loop=self._loop))
if timeout:
if isinstance(timeout, aiohttp.ClientTimeout):
kwargs.update(timeout=timeout)
else:
kwargs.update(timeout=aiohttp.ClientTimeout(total=timeout))
self.opts = kwargs
# Cache configuration
self.redis_opts = redis_opts
self.cache_expiry = cache_expiry
async def create_session(self, **kwargs):
"""Creates an :class:`aiohttp.ClientSession`
Override this or call it with ``kwargs`` to use other :mod:`aiohttp`
functionality not covered by :class:`~.InfluxDBClient.__init__`
"""
self.opts.update(kwargs)
self._session = aiohttp.ClientSession(**self.opts, loop=self._loop)
if self.redis_opts:
if aioredis:
self._redis = await aioredis.create_redis(**self.redis_opts,
loop=self._loop)
else:
warnings.warn(no_redis_warning)
@property
def url(self):
protocol = "https" if self.ssl else "http"
return f"{protocol}://{self.host}:{self.port}{self.path}{{endpoint}}"
@property
def mode(self):
return self._mode
@property
def output(self):
return self._output
@property
def db(self):
return self._db
@mode.setter
def mode(self, mode):
if mode not in ('async', 'blocking'):
raise ValueError('Invalid running mode')
self._mode = mode
@output.setter
def output(self, output):
if pd is None and output == 'dataframe':
raise ValueError(no_pandas_warning)
if output not in ('json', 'dataframe'):
raise ValueError('Invalid output format')
self._output = output
@db.setter
def db(self, db):
self._db = db
if not db:
            warnings.warn('No default database is set. '
                          'Database must be specified when querying/writing.')
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self.close()
def __del__(self):
if not self._loop.is_closed() and self._session:
asyncio.ensure_future(self._session.close(), loop=self._loop)
def __repr__(self):
items = [f'{k}={v}' for k, v in vars(self).items() if not k.startswith('_')]
items.append(f'mode={self.mode}')
return f'{type(self).__name__}({", ".join(items)})'
@runner
async def close(self):
if self._session:
await self._session.close()
self._session = None
if self._redis:
self._redis.close()
@runner
async def ping(self) -> dict:
"""Pings InfluxDB
Returns a dictionary containing the headers of the response from ``influxd``.
"""
if not self._session:
await self.create_session()
async with self._session.get(self.url.format(endpoint='ping')) as resp:
logger.debug(f'{resp.status}: {resp.reason}')
return dict(resp.headers.items())
@runner
async def write(
self,
data: Union[PointType, Iterable[PointType]],
measurement: Optional[str] = None,
db: Optional[str] = None,
precision: Optional[str] = None,
rp: Optional[str] = None,
tag_columns: Optional[Iterable] = None,
**extra_tags,
) -> bool:
"""Writes data to InfluxDB.
Input can be:
1. A mapping (e.g. ``dict``) containing the keys:
``measurement``, ``time``, ``tags``, ``fields``
2. A Pandas :class:`~pandas.DataFrame` with a :class:`~pandas.DatetimeIndex`
3. A user defined class decorated w/
:func:`~aioinflux.serialization.usertype.lineprotocol`
4. A string (``str`` or ``bytes``) properly formatted in InfluxDB's line protocol
5. An iterable of one of the above
Input data in formats 1-3 are parsed to the line protocol before being
written to InfluxDB.
        See the InfluxDB docs for more details.
:param data: Input data (see description above).
        :param measurement: Measurement name. Mandatory only when writing DataFrames.
When writing dictionary-like data, this field is treated as the default value
for points that do not contain a `measurement` field.
:param db: Database to be written to. Defaults to `self.db`.
:param precision: Sets the precision for the supplied Unix time values.
Ignored if input timestamp data is of non-integer type.
Valid values: ``{'ns', 'u', 'µ', 'ms', 's', 'm', 'h'}``
:param rp: Sets the target retention policy for the write.
If unspecified, data is written to the default retention policy.
:param tag_columns: Columns to be treated as tags
(used when writing DataFrames only)
:param extra_tags: Additional tags to be added to all points passed.
:return: Returns ``True`` if insert is successful.
Raises :py:class:`ValueError` otherwise.
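        Usage sketch (measurement, tag and field names are illustrative)::

            point = dict(time='2009-11-10T23:00:00Z',
                         measurement='cpu_load_short',
                         tags={'host': 'server01'},
                         fields={'value': 0.64})
            await client.write(point, region='us-west')  # extra tag added to the point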
"""
if not self._session:
await self.create_session()
if precision is not None:
# FIXME: Implement. Related issue: aioinflux/pull/13
raise NotImplementedError("'precision' parameter is not supported yet")
data = serialization.serialize(data, measurement, tag_columns, **extra_tags)
params = {'db': db or self.db}
if rp:
params['rp'] = rp
url = self.url.format(endpoint='write')
async with self._session.post(url, params=params, data=data) as resp:
if resp.status == 204:
return True
raise InfluxDBWriteError(resp)
@runner
async def query(
self,
q: AnyStr,
*,
epoch: str = 'ns',
chunked: bool = False,
chunk_size: Optional[int] = None,
db: Optional[str] = None,
use_cache: bool = False,
) -> Union[AsyncGenerator[ResultType, None], ResultType]:
"""Sends a query to InfluxDB.
Please refer to the InfluxDB documentation for all the possible queries:
https://docs.influxdata.com/influxdb/latest/query_language/
:param q: Raw query string
:param db: Database to be queried. Defaults to `self.db`.
:param epoch: Precision level of response timestamps.
Valid values: ``{'ns', 'u', 'µ', 'ms', 's', 'm', 'h'}``.
:param chunked: If ``True``, makes InfluxDB return results in streamed batches
rather than as a single response.
Returns an AsyncGenerator which yields responses
in the same format as non-chunked queries.
:param chunk_size: Max number of points for each chunk. By default, InfluxDB chunks
responses by series or by every 10,000 points, whichever occurs first.
        :param use_cache: If ``True``, fetch the result from the Redis cache when present
            and store successful responses in it (requires ``redis_opts``).
:return: Response in the format specified by the combination of
:attr:`.InfluxDBClient.output` and ``chunked``
"""
async def _chunked_generator(url, data, dataframe):
async with self._session.post(url, data=data) as resp:
logger.debug(f'{resp.status} (CHUNKED): {q!r}')
# Hack to avoid aiohttp raising ValueError('Line is too long')
# The number 128 is arbitrary (may be too large/small).
resp.content._high_water *= 128
chunk_count = 0
async for chunk in resp.content:
chunk = json.loads(chunk)
self._check_error(chunk)
chunk_count += 1
logger.debug(f'Yielding chunk #{chunk_count:03d}')
if dataframe:
yield serialization.dataframe.parse(chunk)
else:
yield chunk
if not self._session:
await self.create_session()
# InfluxDB documentation is wrong regarding `/query` parameters
# See https://github.com/influxdata/docs.influxdata.com/issues/1807
if not isinstance(chunked, bool):
raise ValueError("'chunked' must be a boolean")
data = dict(q=q, db=db or self.db, chunked=str(chunked).lower(), epoch=epoch)
if chunked and chunk_size:
data['chunk_size'] = chunk_size
url = self.url.format(endpoint='query')
if chunked:
if use_cache:
raise ValueError("Can't use cache w/ chunked queries")
elif self.mode != 'async':
raise ValueError("Can't use 'chunked' with non-async mode")
else:
return _chunked_generator(url, data, self.output == 'dataframe')
key = f'aioinflux:{q}'
if use_cache and self._redis and await self._redis.exists(key):
logger.debug(f'Cache HIT: {q}')
data = lz4.decompress(await self._redis.get(key))
else:
async with self._session.post(url, data=data) as resp:
data = await resp.read()
if use_cache and self._redis:
logger.debug(f'Cache MISS ({resp.status}): {q}')
if resp.status == 200:
await self._redis.set(key, lz4.compress(data))
await self._redis.expire(key, self.cache_expiry)
else:
logger.debug(f'{resp.status}: {q}')
data = json.loads(data)
self._check_error(data)
if self.output == 'json':
return data
elif self.output == 'dataframe':
return serialization.dataframe.parse(data)
else:
raise ValueError('Invalid output format')
@staticmethod
def _check_error(response):
"""Checks for JSON error messages and raises Python exception"""
if 'error' in response:
raise InfluxDBError(response['error'])
elif 'results' in response:
for statement in response['results']:
if 'error' in statement:
msg = '{d[error]} (statement {d[statement_id]})'
raise InfluxDBError(msg.format(d=statement))
# InfluxQL - Data management
# --------------------------
def create_database(self, db=None):
db = db or self.db
return self.query(f'CREATE DATABASE "{db}"')
def drop_database(self, db=None):
db = db or self.db
return self.query(f'DROP DATABASE "{db}"')
def drop_measurement(self, measurement):
return self.query(f'DROP MEASUREMENT "{measurement}"')
# InfluxQL - Schema exploration
# -----------------------------
def show_databases(self):
return self.query("SHOW DATABASES")
def show_measurements(self):
return self.query("SHOW MEASUREMENTS")
def show_users(self):
return self.query("SHOW USERS")
def show_series(self, measurement=None):
if measurement:
return self.query(f"SHOW SERIES FROM {measurement}")
return self.query("SHOW SERIES")
def show_tag_keys(self, measurement=None):
if measurement:
return self.query(f"SHOW TAG KEYS FROM {measurement}")
return self.query("SHOW TAG KEYS")
def show_field_keys(self, measurement=None):
if measurement:
return self.query(f"SHOW FIELD KEYS FROM {measurement}")
return self.query("SHOW FIELD KEYS")
def show_tag_values(self, key, measurement=None):
if measurement:
return self.query(f'SHOW TAG VALUES FROM "{measurement}" WITH key = "{key}"')
return self.query(f'SHOW TAG VALUES WITH key = "{key}"')
def show_retention_policies(self):
return self.query("SHOW RETENTION POLICIES")
# InfluxQL - Other
# ----------------
def show_continuous_queries(self):
return self.query("SHOW CONTINUOUS QUERIES")
aioinflux-0.9.0/aioinflux/compat.py
import warnings
no_pandas_warning = "Pandas/Numpy is not available. Support for 'dataframe' mode is disabled."
no_redis_warning = "Redis dependencies not available. Support for caching is disabled."
try:
import pandas as pd
import numpy as np
except ModuleNotFoundError:
pd = None
np = None
warnings.warn(no_pandas_warning)
try:
import aioredis
import lz4.block as lz4
except ModuleNotFoundError:
aioredis = None
lz4 = None
__all__ = ['no_pandas_warning', 'no_redis_warning', 'pd', 'np', 'aioredis', 'lz4']
aioinflux-0.9.0/aioinflux/iterutils.py
import inspect
from typing import Optional, Iterator, Callable, Any
def iterpoints(resp: dict, parser: Optional[Callable] = None) -> Iterator[Any]:
"""Iterates a response JSON yielding data point by point.
Can be used with both regular and chunked responses.
By default, returns just a plain list of values representing each point,
without column names, or other metadata.
In case a specific format is needed, an optional ``parser`` argument can be passed.
    ``parser`` is a function/callable that takes data point values
    and, optionally, a ``meta`` keyword argument: a dictionary
    containing all or a subset of the following keys:
``{'columns', 'name', 'tags', 'statement_id'}``.
Sample parser functions:
.. code:: python
        # Function with an optional meta argument
def parser(*x, meta):
return dict(zip(meta['columns'], x))
# Namedtuple (callable)
from collections import namedtuple
parser = namedtuple('MyPoint', ['col1', 'col2', 'col3'])
:param resp: Dictionary containing parsed JSON (output from InfluxDBClient.query)
:param parser: Optional parser function/callable
:return: Generator object
"""
for statement in resp['results']:
if 'series' not in statement:
continue
for series in statement['series']:
if parser is None:
return (x for x in series['values'])
elif 'meta' in inspect.signature(parser).parameters:
meta = {k: series[k] for k in series if k != 'values'}
meta['statement_id'] = statement['statement_id']
return (parser(*x, meta=meta) for x in series['values'])
else:
return (parser(*x) for x in series['values'])
return iter([])
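# Usage sketch (illustrative; ``resp`` is a parsed JSON response from
# ``InfluxDBClient.query`` with the default 'json' output):
#     for point in iterpoints(resp):
#         ...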
aioinflux-0.9.0/aioinflux/serialization/__init__.py
# flake8: noqa 402
from ..compat import pd
if pd:
from . import dataframe
from . import mapping
def serialize(data, measurement=None, tag_columns=None, **extra_tags):
"""Converts input data into line protocol format"""
if isinstance(data, bytes):
return data
elif isinstance(data, str):
return data.encode('utf-8')
elif hasattr(data, 'to_lineprotocol'):
return data.to_lineprotocol()
elif pd is not None and isinstance(data, pd.DataFrame):
return dataframe.serialize(data, measurement, tag_columns, **extra_tags)
elif isinstance(data, dict):
return mapping.serialize(data, measurement, **extra_tags)
elif hasattr(data, '__iter__'):
return b'\n'.join([serialize(i, measurement, tag_columns, **extra_tags) for i in data])
else:
raise ValueError('Invalid input', data)
aioinflux-0.9.0/aioinflux/serialization/common.py
import warnings
# Special characters documentation:
# https://docs.influxdata.com/influxdb/v1.4/write_protocols/line_protocol_reference/#special-characters
# Although not in the official docs, new line characters are removed in order to avoid issues.
# Go implementation: https://github.com/influxdata/influxdb/blob/master/pkg/escape/strings.go
key_escape = str.maketrans({'\\': '\\\\', ',': r'\,', ' ': r'\ ', '=': r'\=', '\n': ''})
tag_escape = str.maketrans({'\\': '\\\\', ',': r'\,', ' ': r'\ ', '=': r'\=', '\n': ''})
str_escape = str.maketrans({'\\': '\\\\', '"': r'\"', '\n': ''})
measurement_escape = str.maketrans({'\\': '\\\\', ',': r'\,', ' ': r'\ ', '\n': ''})
def escape(string, escape_pattern):
"""Assistant function for string escaping"""
try:
return string.translate(escape_pattern)
except AttributeError:
warnings.warn("Non-string-like data passed. "
"Attempting to convert to 'str'.")
        return str(string).translate(escape_pattern)
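# Example (illustrative): escape('cpu load', key_escape) -> r'cpu\ load'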
aioinflux-0.9.0/aioinflux/serialization/dataframe.py
import re
from functools import reduce
from itertools import chain
from typing import Union, Dict, List
import pandas as pd
import numpy as np
from .common import *
DataFrameType = Union[pd.DataFrame, Dict[str, pd.DataFrame], List[Dict[str, pd.DataFrame]]]
# Serialization helper functions
# -------------------------------
def _serializer(series) -> pd.DataFrame:
df = pd.DataFrame(series.get('values', []), columns=series['columns'])
if 'time' not in df.columns:
return df
df: pd.DataFrame = df.set_index(pd.to_datetime(df['time'])).drop('time', axis=1)
df.index = df.index.tz_localize('UTC')
df.index.name = None
if 'tags' in series:
for k, v in series['tags'].items():
df[k] = v
if 'name' in series:
df.name = series['name']
return df
def _get_name(series):
tags = [f'{k}={v}' for k, v in series.get('tags', {}).items()]
return ','.join(filter(None, [series.get('name'), *tags])) or None
def _drop_zero_index(df):
if isinstance(df.index, pd.DatetimeIndex):
if all(i.value == 0 for i in df.index):
return df.reset_index(drop=True)
return df
def parse(resp) -> DataFrameType:
"""Makes a dictionary of DataFrames from a response object"""
statements = []
for statement in resp['results']:
series = {}
for s in statement.get('series', []):
series[_get_name(s)] = _drop_zero_index(_serializer(s))
statements.append(series)
if len(statements) == 1:
series: dict = statements[0]
if len(series) == 1:
return list(series.values())[0] # DataFrame
else:
return series # dict
return statements # list
# Parsing helper functions
# -------------------------
def _itertuples(df):
"""Custom implementation of ``DataFrame.itertuples`` that
returns plain tuples instead of namedtuples. About 50% faster.
"""
cols = [df.iloc[:, k] for k in range(len(df.columns))]
return zip(df.index, *cols)
def _replace(df):
    """Build (pattern, replacement) pairs used to strip NaN/None fields from serialized lines"""
obj_cols = {k for k, v in dict(df.dtypes).items() if v is np.dtype('O')}
other_cols = set(df.columns) - obj_cols
obj_nans = (f'{k}="nan"' for k in obj_cols)
other_nans = (f'{k}=nani?' for k in other_cols)
replacements = [
('|'.join(chain(obj_nans, other_nans)), ''),
(',{2,}', ','),
('|'.join([', ,', ', ', ' ,']), ' '),
]
return replacements
def serialize(df, measurement, tag_columns=None, **extra_tags) -> bytes:
"""Converts a Pandas DataFrame into line protocol format"""
# Pre-processing
if measurement is None:
raise ValueError("Missing 'measurement'")
if not isinstance(df.index, pd.DatetimeIndex):
raise ValueError('DataFrame index is not DatetimeIndex')
tag_columns = set(tag_columns or [])
isnull = df.isnull().any(axis=1)
# Make parser function
tags = []
fields = []
for k, v in extra_tags.items():
tags.append(f"{k}={escape(v, key_escape)}")
for i, (k, v) in enumerate(df.dtypes.items()):
k = k.translate(key_escape)
if k in tag_columns:
tags.append(f"{k}={{p[{i+1}]}}")
elif issubclass(v.type, np.integer):
fields.append(f"{k}={{p[{i+1}]}}i")
elif issubclass(v.type, (np.float, np.bool_)):
fields.append(f"{k}={{p[{i+1}]}}")
else:
# String escaping is skipped for performance reasons
# Strings containing double-quotes can cause strange write errors
# and should be sanitized by the user.
# e.g., df[k] = df[k].astype('str').str.translate(str_escape)
fields.append(f"{k}=\"{{p[{i+1}]}}\"")
fmt = (f'{measurement}', f'{"," if tags else ""}', ','.join(tags),
' ', ','.join(fields), ' {p[0].value}')
f = eval("lambda p: f'{}'".format(''.join(fmt)))
# Map/concat
if isnull.any():
lp = map(f, _itertuples(df[~isnull]))
rep = _replace(df)
lp_nan = (reduce(lambda a, b: re.sub(*b, a), rep, f(p))
for p in _itertuples(df[isnull]))
return '\n'.join(chain(lp, lp_nan)).encode('utf-8')
else:
return '\n'.join(map(f, _itertuples(df))).encode('utf-8')
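# Sketch of the generated template (illustrative): for measurement 'cpu' with a
# tag column 'host' and a float column 'value', ``fmt`` joins into roughly
# 'cpu,host={p[1]} value={p[2]} {p[0].value}', applied to each row tuple.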
aioinflux-0.9.0/aioinflux/serialization/mapping.py
import time
from typing import Mapping
import ciso8601
from .common import *
def serialize(point: Mapping, measurement=None, **extra_tags) -> bytes:
"""Converts dictionary-like data into a single line protocol line (point)"""
tags = _serialize_tags(point, extra_tags)
return (
f'{_serialize_measurement(point, measurement)}'
f'{"," if tags else ""}{tags} '
f'{_serialize_fields(point)} '
f'{_serialize_timestamp(point)}'
).encode()
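# Example (illustrative values):
#   serialize({'measurement': 'cpu', 'tags': {'host': 'h1'},
#              'fields': {'value': 0.5}, 'time': 1546300800000000000})
#   -> b'cpu,host=h1 value=0.5 1546300800000000000'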
def _serialize_measurement(point, measurement):
try:
return escape(point['measurement'], measurement_escape)
except KeyError:
if measurement is None:
raise ValueError("'measurement' missing")
return escape(measurement, measurement_escape)
def _serialize_tags(point, extra_tags):
output = []
for k, v in {**point.get('tags', {}), **extra_tags}.items():
k = escape(k, key_escape)
v = escape(v, tag_escape)
if not v:
continue # ignore blank/null string tags
output.append(f'{k}={v}')
return ','.join(output)
def _serialize_timestamp(point):
dt = point.get('time')
if not dt:
return ''
elif isinstance(dt, int):
return dt
    elif isinstance(dt, (str, bytes)):
        parsed = ciso8601.parse_datetime(dt)
        if not parsed:
            raise ValueError(f'Invalid datetime string: {dt!r}')
        dt = parsed
if not dt.tzinfo:
# Assume tz-naive input to be in UTC, not local time
return int(dt.timestamp() - time.timezone) * 10 ** 9 + dt.microsecond * 1000
return int(dt.timestamp()) * 10 ** 9 + dt.microsecond * 1000
def _serialize_fields(point):
"""Field values can be floats, integers, strings, or Booleans."""
output = []
for k, v in point['fields'].items():
k = escape(k, key_escape)
if isinstance(v, bool):
output.append(f'{k}={v}')
elif isinstance(v, int):
output.append(f'{k}={v}i')
elif isinstance(v, str):
output.append(f'{k}="{v.translate(str_escape)}"')
elif v is None:
# Empty values
continue
else:
# Floats
output.append(f'{k}={v}')
return ','.join(output)
aioinflux-0.9.0/aioinflux/serialization/usertype.py
import enum
import ciso8601
import time
import decimal
from collections import Counter
from typing import TypeVar, Optional, Mapping, Union
from datetime import datetime
# noinspection PyUnresolvedReferences
from .common import * # noqa
from ..compat import pd
__all__ = [
'lineprotocol', 'SchemaError',
'MEASUREMENT', 'TIMEINT', 'TIMESTR', 'TIMEDT',
'TAG', 'TAGENUM',
'BOOL', 'INT', 'DECIMAL', 'FLOAT', 'STR', 'ENUM',
]
MEASUREMENT = TypeVar('MEASUREMENT', bound=str)
TIMEINT = TypeVar('TIMEINT', bound=int)
TIMESTR = TypeVar('TIMESTR', bound=str)
TIMEDT = TypeVar('TIMEDT', bound=datetime)
TAG = TypeVar('TAG', bound=str)
TAGENUM = TypeVar('TAGENUM', bound=enum.Enum)
BOOL = TypeVar('BOOL', bound=bool)
INT = TypeVar('INT', bound=int)
DECIMAL = TypeVar('DECIMAL', bound=decimal.Decimal)
FLOAT = TypeVar('FLOAT', bound=float)
STR = TypeVar('STR', bound=str)
ENUM = TypeVar('ENUM', bound=enum.Enum)
time_types = [TIMEINT, TIMEDT, TIMESTR]
tag_types = [TAG, TAGENUM]
field_types = [BOOL, INT, DECIMAL, FLOAT, STR, ENUM]
optional_field_types = [Optional[f] for f in field_types]
class SchemaError(TypeError):
"""Raised when invalid schema is passed to :func:`lineprotocol`"""
def str_to_dt(s):
dt = ciso8601.parse_datetime(s)
if dt:
return dt
    raise ValueError(f'Invalid datetime string: {s!r}')
def dt_to_int(dt):
if not dt.tzinfo:
# Assume tz-naive input to be in UTC, not local time
return int(dt.timestamp() - time.timezone) * 10 ** 9 + dt.microsecond * 1000
return int(dt.timestamp()) * 10 ** 9 + dt.microsecond * 1000
def _validate_schema(schema, placeholder):
c = Counter(schema.values())
if not c:
raise SchemaError("Schema/type annotations missing")
if c[MEASUREMENT] > 1:
raise SchemaError("Class can't have more than one 'MEASUREMENT' attribute")
if sum(c[e] for e in time_types) > 1:
raise SchemaError(f"Can't have more than one timestamp-type attribute {time_types}")
if sum(c[e] for e in field_types + optional_field_types) < 1 and not placeholder:
raise SchemaError(f"Must have one or more non-empty "
f"field-type attributes {field_types}")
def is_optional(t, base_type):
"""Checks if type hint is Optional[base_type]"""
    # NOTE: The 'typing' module is still "provisional" and its documentation is sub-optimal,
    # which requires this kind of introspection into undocumented implementation details
# NOTE: May break in Python 3.8
# TODO: Check if works on Python 3.6
try:
cond1 = getattr(t, '__origin__') is Union
cond2 = {type(None), base_type} == set(getattr(t, '__args__', []))
if cond1 and cond2:
return True
except AttributeError:
return False
return False
def _make_serializer(meas, schema, extra_tags, placeholder): # noqa: C901
"""Factory of line protocol parsers"""
_validate_schema(schema, placeholder)
tags = []
fields = []
ts = None
meas = meas
for k, t in schema.items():
if t is MEASUREMENT:
meas = f"{{i.{k}}}"
elif t is TIMEINT:
ts = f"{{i.{k}}}"
elif t is TIMESTR:
if pd:
ts = f"{{pd.Timestamp(i.{k} or 0).value}}"
else:
ts = f"{{dt_to_int(str_to_dt(i.{k}))}}"
elif t is TIMEDT:
if pd:
ts = f"{{pd.Timestamp(i.{k} or 0).value}}"
else:
ts = f"{{dt_to_int(i.{k})}}"
elif t is TAG or is_optional(t, TAG):
tags.append(f"{k}={{str(i.{k}).translate(tag_escape)}}")
elif t is TAGENUM or is_optional(t, TAGENUM):
tags.append(f"{k}={{getattr(i.{k}, 'name', i.{k} or None)}}")
elif t is FLOAT or is_optional(t, FLOAT):
fields.append(f"{k}={{i.{k}}}")
elif t is DECIMAL or is_optional(t, DECIMAL):
fields.append(f"{k}={{i.{k}}}")
elif t is BOOL or is_optional(t, BOOL):
fields.append(f"{k}={{i.{k}}}")
elif t is INT or is_optional(t, INT):
fields.append(f"{k}={{i.{k}}}i")
elif t is STR or is_optional(t, STR):
fields.append(f"{k}=\\\"{{str(i.{k}).translate(str_escape)}}\\\"")
elif t is ENUM or is_optional(t, ENUM):
fields.append(f"{k}=\\\"{{getattr(i.{k}, 'name', i.{k} or None)}}\\\"")
else:
raise SchemaError(f"Invalid attribute type {k!r}: {t!r}")
extra_tags = extra_tags or {}
for k, v in extra_tags.items():
tags.append(f"{k}={v}")
if placeholder:
fields.insert(0, f"_=true")
sep = ',' if tags else ''
ts = f' {ts}' if ts else ''
fmt = f"{meas}{sep}{','.join(tags)} {','.join(fields)}{ts}"
f = eval(f'lambda i: f"{fmt}".encode()')
f.__doc__ = "Returns InfluxDB line protocol representation of user-defined class"
return f
def lineprotocol(
cls=None,
*,
schema: Optional[Mapping[str, type]] = None,
rm_none: bool = False,
extra_tags: Optional[Mapping[str, str]] = None,
placeholder: bool = False
):
"""Adds ``to_lineprotocol`` method to arbitrary user-defined classes
:param cls: Class to monkey-patch
:param schema: Schema dictionary (attr/type pairs).
    :param rm_none: Whether to skip ``None``-valued attributes when serializing.
        If ``False``, passing ``None`` values to boolean, integer, float or time fields
will result in write errors. Setting to ``True`` is "safer" but impacts performance.
:param extra_tags: Hard coded tags to be added to every point generated.
:param placeholder: If no field attributes are present, add a placeholder attribute (``_``)
which is always equal to ``True``. This is a workaround for creating field-less points
(which is not supported natively by InfluxDB)
"""
opts = dict(
schema=schema,
rm_none=rm_none,
extra_tags=extra_tags or {},
placeholder=placeholder,
)
def _lineprotocol(cls):
_schema = schema or getattr(cls, '__annotations__', {})
# TODO: Raise warning or exception if schema has optionals but rm_none is False
# for t in _schema.values():
# for bt in field_types + tag_types:
# if is_optional(t, bt):
# warnings.warn("")
f = _make_serializer(cls.__name__, _schema, extra_tags, placeholder)
cls.to_lineprotocol = f
cls.to_lineprotocol.opts = opts
return cls
def _rm_none_lineprotocol(cls):
def _parser_selector(i):
if not hasattr(i, '_asdict'):
raise ValueError("'rm_none' can only be used with namedtuples")
key = tuple([k for k, v in i._asdict().items() if v != '' and v is not None])
if key not in parsers:
_schema = schema or getattr(cls, '__annotations__', {})
_schema = {k: v for k, v in _schema.items() if k in key}
parsers[key] = _make_serializer(cls.__name__, _schema, extra_tags, placeholder)
return parsers[key](i)
parsers = {}
cls.to_lineprotocol = _parser_selector
cls.to_lineprotocol.opts = opts
return cls
if cls:
if rm_none:
# Using rm_none has substantial runtime impact.
# Best avoided if performance is critical.
return _rm_none_lineprotocol(cls)
# No options
return _lineprotocol(cls)
else:
if rm_none:
return _rm_none_lineprotocol
return _lineprotocol
aioinflux-0.9.0/docs/Makefile
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
SPHINXPROJ = aioinflux
SOURCEDIR = .
BUILDDIR = _build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
aioinflux-0.9.0/docs/_static/custom.css
.wy-table-responsive table td {
white-space: normal;
}
aioinflux-0.9.0/docs/_static/rst2html.css
/**
* :Author: Chad Skeeters
* :Contact: cskeeters@nciinc.com
* Stylesheet for use with Docutils/rst2html.
* Example: rst2html --stylesheet=rst2html.css README.rst doc/html/README.html
*/
html {
font-size: 100%;
-webkit-text-size-adjust: 100%;
-ms-text-size-adjust: 100%;
}
a:focus {
outline: thin dotted #333;
outline: 5px auto -webkit-focus-ring-color;
outline-offset: -2px;
}
a:hover,
a:active {
outline: 0;
}
sub,
sup {
position: relative;
font-size: 75%;
line-height: 0;
vertical-align: baseline;
}
sup {
top: -0.5em;
}
sub {
bottom: -0.25em;
}
img {
width: auto\9;
height: auto;
max-width: 100%;
vertical-align: middle;
border: 0;
-ms-interpolation-mode: bicubic;
}
@media print {
* {
color: #000 !important;
text-shadow: none !important;
background: transparent !important;
box-shadow: none !important;
}
a,
a:visited {
text-decoration: underline;
}
a[href]:after {
content: " (" attr(href) ")";
}
abbr[title]:after {
content: " (" attr(title) ")";
}
.ir a:after,
a[href^="javascript:"]:after,
a[href^="#"]:after {
content: "";
}
pre,
blockquote {
border: 1px solid #999;
page-break-inside: avoid;
}
thead {
display: table-header-group;
}
tr,
img {
page-break-inside: avoid;
}
img {
max-width: 100% !important;
}
@page {
margin: 0.5cm;
}
h1 {
page-break-before: always;
}
h1.title {
page-break-before: avoid;
}
p,
h2,
h3 {
orphans: 3;
widows: 3;
}
h2,
h3 {
page-break-after: avoid;
}
}
body {
margin: 40px;
margin-right: auto;
margin-left: auto;
width: 700px;
font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
font-size: 14px;
line-height: 20px;
color: #333333;
background-color: #ffffff;
}
a {
color: #0088cc;
text-decoration: none;
}
a:hover,
a:focus {
color: #005580;
text-decoration: underline;
}
.img-rounded {
-webkit-border-radius: 6px;
-moz-border-radius: 6px;
border-radius: 6px;
}
.img-polaroid {
padding: 4px;
background-color: #fff;
border: 1px solid #ccc;
border: 1px solid rgba(0, 0, 0, 0.2);
-webkit-box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
-moz-box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
}
p {
margin: 0 0 10px;
}
small {
font-size: 85%;
}
strong {
font-weight: bold;
}
em {
font-style: italic;
}
cite {
font-style: normal;
}
h1,
h2,
h3,
h4,
h5,
h6 {
font-family: inherit;
font-weight: bold;
line-height: 20px;
color: inherit;
text-rendering: optimizelegibility;
}
h1 {
font-size: 2em;
padding-bottom: .2em;
border-bottom: 1px solid grey;
}
h1.title {
padding-bottom: 1em;
border-bottom: 0px;
}
h2 {
font-size: 1.5em;
}
h3 {
font-size: 1.3em;
font-family: Georgia, serif;
font-style: italic;
/*font-weight:normal;*/;
}
h4 {
font-size: 1.3em;
}
h5 {
font-size: 1.2em;
}
h6 {
font-size: 1.1em;
}
ul,
ol {
padding: 0;
margin: 0 0 10px 25px;
}
ul ul,
ul ol,
ol ol,
ol ul {
margin-bottom: 0;
}
li {
line-height: 20px;
}
dl {
margin-bottom: 20px;
}
dt,
dd {
line-height: 20px;
}
dt {
font-weight: bold;
}
dd {
margin-left: 10px;
}
hr {
margin: 20px 0;
border: 0;
border-top: 1px solid #eeeeee;
border-bottom: 1px solid #ffffff;
}
abbr[title],
abbr[data-original-title] {
cursor: help;
border-bottom: 1px dotted #999999;
}
abbr.initialism {
font-size: 90%;
text-transform: uppercase;
}
blockquote {
padding: 0 0 0 15px;
margin: 0 0 20px;
border-left: 5px solid #eeeeee;
}
blockquote p {
margin-bottom: 0;
font-size: 17.5px;
font-weight: 300;
line-height: 1.25;
}
q:before,
q:after,
blockquote:before,
blockquote:after {
content: "";
}
address {
display: block;
margin-bottom: 20px;
font-style: normal;
line-height: 20px;
}
code,
pre {
padding: 0 3px 2px;
font-family: Monaco, Menlo, Consolas, "Courier New", monospace;
font-size: 12px;
color: #333333;
-webkit-border-radius: 3px;
-moz-border-radius: 3px;
border-radius: 3px;
}
code {
padding: 2px 4px;
color: #d14;
white-space: nowrap;
background-color: #f7f7f9;
border: 1px solid #e1e1e8;
}
pre {
display: block;
padding: 9.5px;
margin: 0 0 10px;
font-size: 13px;
line-height: 20px;
word-break: break-all;
word-wrap: break-word;
white-space: pre;
white-space: pre-wrap;
background-color: #f5f5f5;
border: 1px solid #ccc;
border: 1px solid rgba(0, 0, 0, 0.15);
-webkit-border-radius: 4px;
-moz-border-radius: 4px;
border-radius: 4px;
}
pre.prettyprint {
margin-bottom: 20px;
}
pre code {
padding: 0;
color: inherit;
white-space: pre;
white-space: pre-wrap;
background-color: transparent;
border: 0;
}
.pre-scrollable {
max-height: 340px;
overflow-y: scroll;
}
table {
max-width: 100%;
background-color: transparent;
border-collapse: collapse;
border-spacing: 0;
}
.table {
width: 100%;
margin-bottom: 20px;
}
.table th,
.table td {
padding: 8px;
line-height: 20px;
text-align: left;
vertical-align: top;
border-top: 1px solid #dddddd;
}
.table th {
font-weight: bold;
}
.table thead th {
vertical-align: bottom;
}
.table caption + thead tr:first-child th,
.table caption + thead tr:first-child td,
.table colgroup + thead tr:first-child th,
.table colgroup + thead tr:first-child td,
.table thead:first-child tr:first-child th,
.table thead:first-child tr:first-child td {
border-top: 0;
}
.table tbody + tbody {
border-top: 2px solid #dddddd;
}
.table .table {
background-color: #ffffff;
}
.table-condensed th,
.table-condensed td {
padding: 4px 5px;
}
.table-bordered {
border: 1px solid #dddddd;
border-collapse: separate;
*border-collapse: collapse;
border-left: 0;
-webkit-border-radius: 4px;
-moz-border-radius: 4px;
border-radius: 4px;
}
.table-bordered th,
.table-bordered td {
border-left: 1px solid #dddddd;
}
.table-bordered caption + thead tr:first-child th,
.table-bordered caption + tbody tr:first-child th,
.table-bordered caption + tbody tr:first-child td,
.table-bordered colgroup + thead tr:first-child th,
.table-bordered colgroup + tbody tr:first-child th,
.table-bordered colgroup + tbody tr:first-child td,
.table-bordered thead:first-child tr:first-child th,
.table-bordered tbody:first-child tr:first-child th,
.table-bordered tbody:first-child tr:first-child td {
border-top: 0;
}
.table-bordered thead:first-child tr:first-child > th:first-child,
.table-bordered tbody:first-child tr:first-child > td:first-child,
.table-bordered tbody:first-child tr:first-child > th:first-child {
-webkit-border-top-left-radius: 4px;
border-top-left-radius: 4px;
-moz-border-radius-topleft: 4px;
}
.table-bordered thead:first-child tr:first-child > th:last-child,
.table-bordered tbody:first-child tr:first-child > td:last-child,
.table-bordered tbody:first-child tr:first-child > th:last-child {
-webkit-border-top-right-radius: 4px;
border-top-right-radius: 4px;
-moz-border-radius-topright: 4px;
}
.table-bordered thead:last-child tr:last-child > th:first-child,
.table-bordered tbody:last-child tr:last-child > td:first-child,
.table-bordered tbody:last-child tr:last-child > th:first-child,
.table-bordered tfoot:last-child tr:last-child > td:first-child,
.table-bordered tfoot:last-child tr:last-child > th:first-child {
-webkit-border-bottom-left-radius: 4px;
border-bottom-left-radius: 4px;
-moz-border-radius-bottomleft: 4px;
}
.table-bordered thead:last-child tr:last-child > th:last-child,
.table-bordered tbody:last-child tr:last-child > td:last-child,
.table-bordered tbody:last-child tr:last-child > th:last-child,
.table-bordered tfoot:last-child tr:last-child > td:last-child,
.table-bordered tfoot:last-child tr:last-child > th:last-child {
-webkit-border-bottom-right-radius: 4px;
border-bottom-right-radius: 4px;
-moz-border-radius-bottomright: 4px;
}
.table-bordered tfoot + tbody:last-child tr:last-child td:first-child {
-webkit-border-bottom-left-radius: 0;
border-bottom-left-radius: 0;
-moz-border-radius-bottomleft: 0;
}
.table-bordered tfoot + tbody:last-child tr:last-child td:last-child {
-webkit-border-bottom-right-radius: 0;
border-bottom-right-radius: 0;
-moz-border-radius-bottomright: 0;
}
.table-bordered caption + thead tr:first-child th:first-child,
.table-bordered caption + tbody tr:first-child td:first-child,
.table-bordered colgroup + thead tr:first-child th:first-child,
.table-bordered colgroup + tbody tr:first-child td:first-child {
-webkit-border-top-left-radius: 4px;
border-top-left-radius: 4px;
-moz-border-radius-topleft: 4px;
}
.table-bordered caption + thead tr:first-child th:last-child,
.table-bordered caption + tbody tr:first-child td:last-child,
.table-bordered colgroup + thead tr:first-child th:last-child,
.table-bordered colgroup + tbody tr:first-child td:last-child {
-webkit-border-top-right-radius: 4px;
border-top-right-radius: 4px;
-moz-border-radius-topright: 4px;
}
.table-striped tbody > tr:nth-child(odd) > td,
.table-striped tbody > tr:nth-child(odd) > th {
background-color: #f9f9f9;
}
.table-hover tbody tr:hover > td,
.table-hover tbody tr:hover > th {
background-color: #f5f5f5;
}
table td[class*="span"],
table th[class*="span"],
.row-fluid table td[class*="span"],
.row-fluid table th[class*="span"] {
display: table-cell;
float: none;
margin-left: 0;
}
.hero-unit {
padding: 60px;
margin-bottom: 30px;
font-size: 18px;
font-weight: 200;
line-height: 30px;
color: inherit;
background-color: #eeeeee;
-webkit-border-radius: 6px;
-moz-border-radius: 6px;
border-radius: 6px;
}
.hero-unit h1 {
margin-bottom: 0;
font-size: 60px;
line-height: 1;
letter-spacing: -1px;
color: inherit;
}
.hero-unit li {
line-height: 30px;
}
/* rst2html default used to remove borders from tables and images */
.borderless, table.borderless td, table.borderless th {
border: 0;
}
table.borderless td, table.borderless th {
/* Override padding for "table.docutils td" with "! important".
The right padding separates the table cells. */
padding: 0 0.5em 0 0 ! important;
}
.first {
/* Override more specific margin styles with "! important". */
margin-top: 0 ! important;
}
.last, .with-subtitle {
margin-bottom: 0 ! important;
}
.hidden {
display: none;
}
a.toc-backref {
text-decoration: none;
color: black;
}
blockquote.epigraph {
margin: 2em 5em;
}
dl.docutils dd {
margin-bottom: 0.5em;
}
object[type="image/svg+xml"], object[type="application/x-shockwave-flash"] {
overflow: hidden;
}
/* Uncomment (and remove this text!) to get bold-faced definition list terms
dl.docutils dt {
font-weight: bold }
*/
div.abstract {
margin: 2em 5em;
}
div.abstract p.topic-title {
font-weight: bold;
text-align: center;
}
div.admonition, div.attention, div.caution, div.danger, div.error,
div.hint, div.important, div.note, div.tip, div.warning {
margin: 2em;
border: medium outset;
padding: 1em;
}
div.note, div.warning {
margin: 1.5em 0px;
border: none;
}
div.note p.admonition-title,
div.warning p.admonition-title {
display: none;
}
/* Clearfix
* http://css-tricks.com/snippets/css/clear-fix/
*/
div.note:after,
div.warning:after {
content: "";
display: table;
clear: both;
}
div.note p:before,
div.warning p:before {
display: block;
float: left;
font-size: 4em;
line-height: 1em;
margin-right: 20px;
margin-left: 0em;
margin-top: -10px;
content: '\0270D';
/*handwriting*/;
}
div.warning p:before {
content: '\026A0';
/* warning */
}
div.admonition p.admonition-title, div.hint p.admonition-title,
div.important p.admonition-title, div.note p.admonition-title,
div.tip p.admonition-title {
font-weight: bold;
font-family: sans-serif;
}
div.attention p.admonition-title, div.caution p.admonition-title,
div.danger p.admonition-title, div.error p.admonition-title,
div.warning p.admonition-title, .code .error {
color: red;
font-weight: bold;
font-family: sans-serif;
}
/* Uncomment (and remove this text!) to get reduced vertical space in
compound paragraphs.
div.compound .compound-first, div.compound .compound-middle {
margin-bottom: 0.5em }
div.compound .compound-last, div.compound .compound-middle {
margin-top: 0.5em }
*/
div.dedication {
margin: 2em 5em;
text-align: center;
font-style: italic;
}
div.dedication p.topic-title {
font-weight: bold;
font-style: normal;
}
div.figure {
margin-left: 2em;
margin-right: 2em;
}
div.footer, div.header {
clear: both;
font-size: smaller;
}
div.line-block {
display: block;
margin-top: 1em;
margin-bottom: 1em;
}
div.line-block div.line-block {
margin-top: 0;
margin-bottom: 0;
margin-left: 1.5em;
}
div.sidebar {
margin: 0 0 0.5em 1em;
border: medium outset;
padding: 1em;
background-color: #ffffee;
width: 40%;
float: right;
clear: right;
}
div.sidebar p.rubric {
font-family: sans-serif;
font-size: medium;
}
div.system-messages {
margin: 5em;
}
div.system-messages h1 {
color: red;
}
div.system-message {
border: medium outset;
padding: 1em;
}
div.system-message p.system-message-title {
color: red;
font-weight: bold;
}
div.topic {
margin: 2em;
}
h1.section-subtitle, h2.section-subtitle, h3.section-subtitle,
h4.section-subtitle, h5.section-subtitle, h6.section-subtitle {
margin-top: 0.4em;
}
h1.title {
text-align: center;
}
h2.subtitle {
text-align: center;
}
hr.docutils {
width: 75%;
}
img.align-left, .figure.align-left, object.align-left {
clear: left;
float: left;
margin-right: 1em;
}
img.align-right, .figure.align-right, object.align-right {
clear: right;
float: right;
margin-left: 1em;
}
img.align-center, .figure.align-center, object.align-center {
display: block;
margin-left: auto;
margin-right: auto;
}
.align-left {
text-align: left;
}
.align-center {
clear: both;
text-align: center;
}
.align-right {
text-align: right;
}
/* reset inner alignment in figures */
div.align-right {
text-align: inherit;
}
/* div.align-center * { */
/* text-align: left } */
ol.simple, ul.simple {
margin-bottom: 1em;
}
ol.arabic {
list-style: decimal;
}
ol.loweralpha {
list-style: lower-alpha;
}
ol.upperalpha {
list-style: upper-alpha;
}
ol.lowerroman {
list-style: lower-roman;
}
ol.upperroman {
list-style: upper-roman;
}
p.attribution {
text-align: right;
margin-left: 50%;
}
p.caption {
font-style: italic;
}
p.credits {
font-style: italic;
font-size: smaller;
}
p.label {
white-space: nowrap;
}
p.rubric {
font-weight: bold;
font-size: larger;
color: maroon;
text-align: center;
}
p.sidebar-title {
font-family: sans-serif;
font-weight: bold;
font-size: larger;
}
p.sidebar-subtitle {
font-family: sans-serif;
font-weight: bold;
}
p.topic-title {
font-weight: bold;
}
pre.address {
margin-bottom: 0;
margin-top: 0;
font: inherit;
}
pre.literal-block, pre.doctest-block, pre.math, pre.code {
margin-left: 2em;
margin-right: 2em;
}
pre.code .ln {
color: grey;
} /* line numbers */
pre.code, code {
background-color: #eeeeee;
}
pre.code .comment, code .comment {
color: #5C6576;
}
pre.code .keyword, code .keyword {
color: #3B0D06;
font-weight: bold;
}
pre.code .literal.string, code .literal.string {
color: #0C5404;
}
pre.code .name.builtin, code .name.builtin {
color: #352B84;
}
pre.code .deleted, code .deleted {
background-color: #DEB0A1;
}
pre.code .inserted, code .inserted {
background-color: #A3D289;
}
span.classifier {
font-family: sans-serif;
font-style: oblique;
}
span.classifier-delimiter {
font-family: sans-serif;
font-weight: bold;
}
span.interpreted {
font-family: sans-serif;
}
span.option {
white-space: nowrap;
}
span.pre {
white-space: pre;
}
span.problematic {
color: red;
}
span.section-subtitle {
/* font-size relative to parent (h1..h6 element) */
font-size: 80%;
}
table.citation {
border-left: solid 1px gray;
margin-left: 1px;
}
table.docinfo {
margin: 2em 4em;
}
table.docutils {
margin-top: 0.5em;
margin-bottom: 0.5em;
}
table.footnote {
border-left: solid 1px black;
margin-left: 1px;
}
table.docutils td, table.docutils th,
table.docinfo td, table.docinfo th {
padding-left: 0.5em;
padding-right: 0.5em;
vertical-align: top;
}
table.docutils th.field-name, table.docinfo th.docinfo-name {
font-weight: bold;
text-align: left;
white-space: nowrap;
padding-left: 0;
}
h1 tt.docutils, h2 tt.docutils, h3 tt.docutils,
h4 tt.docutils, h5 tt.docutils, h6 tt.docutils {
font-size: 100%;
}
ul.auto-toc {
list-style-type: none;
}
.code .pygments-hll {
background-color: #ffffcc;
}
.code .pygments-c {
color: #60a0b0;
font-style: italic;
} /* Comment */
.code .pygments-err {
border: 1px solid #FF0000;
} /* Error */
.code .pygments-k {
color: #007020;
font-weight: bold;
} /* Keyword */
.code .pygments-o {
color: #666666;
} /* Operator */
.code .pygments-cm {
color: #60a0b0;
font-style: italic;
} /* Comment.Multiline */
.code .pygments-cp {
color: #007020;
} /* Comment.Preproc */
.code .pygments-c1 {
color: #60a0b0;
font-style: italic;
} /* Comment.Single */
.code .pygments-cs {
color: #60a0b0;
background-color: #fff0f0;
} /* Comment.Special */
.code .pygments-gd {
color: #A00000;
} /* Generic.Deleted */
.code .pygments-ge {
font-style: italic;
} /* Generic.Emph */
.code .pygments-gr {
color: #FF0000;
} /* Generic.Error */
.code .pygments-gh {
color: #000080;
font-weight: bold;
} /* Generic.Heading */
.code .pygments-gi {
color: #00A000;
} /* Generic.Inserted */
.code .pygments-go {
color: #888888;
} /* Generic.Output */
.code .pygments-gp {
color: #c65d09;
font-weight: bold;
} /* Generic.Prompt */
.code .pygments-gs {
font-weight: bold;
} /* Generic.Strong */
.code .pygments-gu {
color: #800080;
font-weight: bold;
} /* Generic.Subheading */
.code .pygments-gt {
color: #0044DD;
} /* Generic.Traceback */
.code .pygments-kc {
color: #007020;
font-weight: bold;
} /* Keyword.Constant */
.code .pygments-kd {
color: #007020;
font-weight: bold;
} /* Keyword.Declaration */
.code .pygments-kn {
color: #007020;
font-weight: bold;
} /* Keyword.Namespace */
.code .pygments-kp {
color: #007020;
} /* Keyword.Pseudo */
.code .pygments-kr {
color: #007020;
font-weight: bold;
} /* Keyword.Reserved */
.code .pygments-kt {
color: #902000;
} /* Keyword.Type */
.code .pygments-m {
color: #40a070;
} /* Literal.Number */
.code .pygments-s {
color: #4070a0;
} /* Literal.String */
.code .pygments-na {
color: #4070a0;
} /* Name.Attribute */
.code .pygments-nb {
color: #007020;
} /* Name.Builtin */
.code .pygments-nc {
color: #0e84b5;
font-weight: bold;
} /* Name.Class */
.code .pygments-no {
color: #60add5;
} /* Name.Constant */
.code .pygments-nd {
color: #555555;
font-weight: bold;
} /* Name.Decorator */
.code .pygments-ni {
color: #d55537;
font-weight: bold;
} /* Name.Entity */
.code .pygments-ne {
color: #007020;
} /* Name.Exception */
.code .pygments-nf {
color: #06287e;
} /* Name.Function */
.code .pygments-nl {
color: #002070;
font-weight: bold;
} /* Name.Label */
.code .pygments-nn {
color: #0e84b5;
font-weight: bold;
} /* Name.Namespace */
.code .pygments-nt {
color: #062873;
font-weight: bold;
} /* Name.Tag */
.code .pygments-nv {
color: #bb60d5;
} /* Name.Variable */
.code .pygments-ow {
color: #007020;
font-weight: bold;
} /* Operator.Word */
.code .pygments-w {
color: #bbbbbb;
} /* Text.Whitespace */
.code .pygments-mf {
color: #40a070;
} /* Literal.Number.Float */
.code .pygments-mh {
color: #40a070;
} /* Literal.Number.Hex */
.code .pygments-mi {
color: #40a070;
} /* Literal.Number.Integer */
.code .pygments-mo {
color: #40a070;
} /* Literal.Number.Oct */
.code .pygments-sb {
color: #4070a0;
} /* Literal.String.Backtick */
.code .pygments-sc {
color: #4070a0;
} /* Literal.String.Char */
.code .pygments-sd {
color: #4070a0;
font-style: italic;
} /* Literal.String.Doc */
.code .pygments-s2 {
color: #4070a0;
} /* Literal.String.Double */
.code .pygments-se {
color: #4070a0;
font-weight: bold;
} /* Literal.String.Escape */
.code .pygments-sh {
color: #4070a0;
} /* Literal.String.Heredoc */
.code .pygments-si {
color: #70a0d0;
font-style: italic;
} /* Literal.String.Interpol */
.code .pygments-sx {
color: #c65d09;
} /* Literal.String.Other */
.code .pygments-sr {
color: #235388;
} /* Literal.String.Regex */
.code .pygments-s1 {
color: #4070a0;
} /* Literal.String.Single */
.code .pygments-ss {
color: #517918;
} /* Literal.String.Symbol */
.code .pygments-bp {
color: #007020;
} /* Name.Builtin.Pseudo */
.code .pygments-vc {
color: #bb60d5;
} /* Name.Variable.Class */
.code .pygments-vg {
color: #bb60d5;
} /* Name.Variable.Global */
.code .pygments-vi {
color: #bb60d5;
} /* Name.Variable.Instance */
.code .pygments-il {
color: #40a070;
} /* Literal.Number.Integer.Long */ aioinflux-0.9.0/docs/api.rst 0000664 0000000 0000000 00000001257 13511520470 0015754 0 ustar 00root root 0000000 0000000 API Reference
=============
.. contents::
:local:
Client Interface
----------------
.. autoclass:: aioinflux.client.InfluxDBClient
:members:
.. automethod:: __init__
.. autoexception:: aioinflux.client.InfluxDBError
.. autoexception:: aioinflux.client.InfluxDBWriteError
Result iteration
""""""""""""""""
.. automodule:: aioinflux.iterutils
:members:
Serialization
-------------
Mapping
"""""""
.. automodule:: aioinflux.serialization.mapping
:members:
Dataframe
"""""""""
.. automodule:: aioinflux.serialization.dataframe
:members:
User-defined classes
""""""""""""""""""""
.. automodule:: aioinflux.serialization.usertype
:members:
:undoc-members:
aioinflux-0.9.0/docs/conf.py 0000664 0000000 0000000 00000013231 13511520470 0015743 0 ustar 00root root 0000000 0000000 # -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
from pathlib import Path
meta = {}
with open(Path(__file__).parent.parent / 'aioinflux' / '__init__.py') as f:
exec('\n'.join(l for l in f if l.startswith('__')), meta)
def setup(app):
app.add_stylesheet('custom.css')
project = 'aioinflux'
copyright = '2018, Gustavo Bezerra'
author = 'Gustavo Bezerra'
# The short X.Y version
version = meta['__version__']
# The full version, including alpha/beta/rc tags
release = meta['__version__']
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.autosectionlabel',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.todo',
'sphinx.ext.viewcode',
'sphinx_autodoc_typehints'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'aioinfluxdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'aioinflux.tex', 'aioinflux Documentation',
'Gustavo Bezerra', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'aioinflux', 'aioinflux Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'aioinflux', 'aioinflux Documentation',
author, 'aioinflux', 'One line description of project.',
'Miscellaneous'),
]
# -- Extension configuration -------------------------------------------------
todo_include_todos = True
# -- Options for intersphinx extension ---------------------------------------
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'python': ('https://docs.python.org/3', None),
'pandas': ('https://pandas.pydata.org/pandas-docs/stable/', None),
'aiohttp': ('https://docs.aiohttp.org/en/stable/', None),
'aioredis': ('https://aioredis.readthedocs.io/en/stable/', None),
}
aioinflux-0.9.0/docs/implementation.rst 0000664 0000000 0000000 00000001310 13511520470 0020216 0 ustar 00root root 0000000 0000000 Implementation details
======================
Since InfluxDB exposes all its functionality through an `HTTP
API `__,
:class:`~aioinflux.client.InfluxDBClient` tries to be nothing more
than a thin and simple wrapper around that API.
The InfluxDB HTTP API exposes exactly three endpoints/functions:
:meth:`~aioinflux.client.InfluxDBClient.ping`,
:meth:`~aioinflux.client.InfluxDBClient.write` and
:meth:`~aioinflux.client.InfluxDBClient.query`.
:class:`~aioinflux.client.InfluxDBClient` merely wraps these three functions and provides
some parsing functionality for generating line protocol data (when
writing) and parsing JSON responses (when querying).
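For illustration, here is a minimal sketch touching each of the three wrapped endpoints
(patterned after the TL;DR example in the user guide):
.. code:: python

    import asyncio
    from aioinflux import InfluxDBClient

    async def main():
        async with InfluxDBClient(db='testdb') as client:
            await client.create_database(db='testdb')
            print(await client.ping())    # mapping of response headers (e.g. X-Influxdb-Version)
            await client.write('cpu_load value=0.5')              # write: line protocol passthrough
            print(await client.query('SELECT * FROM cpu_load'))   # query: parsed JSON response

    asyncio.get_event_loop().run_until_complete(main())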
aioinflux-0.9.0/docs/index.rst 0000664 0000000 0000000 00000002046 13511520470 0016307 0 ustar 00root root 0000000 0000000 .. aioinflux documentation master file, created by
sphinx-quickstart on Sun Jul 22 19:52:56 2018.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
Welcome to aioinflux's documentation!
=====================================
.. include:: ../README.rst
:start-line: 2
:end-line: 35
.. toctree::
:maxdepth: 3
:caption: Contents:
install
usage
implementation
api
Contributing
------------
| To contribute, fork the repository on GitHub, make your changes and
submit a pull request.
| Aioinflux is not a mature project yet, so simply raising issues
is also greatly appreciated :)
Alternatives
------------
- `InfluxDB-Python <https://github.com/influxdata/influxdb-python>`__: The official
blocking-only client. Based on Requests.
- `influx-sansio `__: Fork of aioinflux
using curio/trio and asks as a backend.
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
aioinflux-0.9.0/docs/install.rst 0000664 0000000 0000000 00000002142 13511520470 0016643 0 ustar 00root root 0000000 0000000 Installation
============
To install the latest release:
.. code:: bash
$ pip install aioinflux
$ pip install aioinflux[pandas] # For DataFrame parsing support
The library is still in beta, so you may also want to install the latest version from
the development branch:
.. code:: bash
$ pip install git+https://github.com/plugaai/aioinflux@dev
Dependencies
~~~~~~~~~~~~
Aioinflux supports Python 3.6+ **ONLY**. For older Python versions
please use the `official Python client`_.
However, there is `some discussion `_
regarding PyPy/Python 3.5 support.
The main third-party library dependency is |aiohttp|, for all HTTP
request handling, and |pandas| for :class:`~pandas.DataFrame` reading/writing support.
There are currently no plans to support other HTTP libraries besides |aiohttp|.
If |aiohttp| + |asyncio| is not your soup, see :ref:`Alternatives`.
.. |asyncio| replace:: :py:mod:`asyncio`
.. |aiohttp| replace:: :py:mod:`aiohttp`
.. |pandas| replace:: :py:mod:`pandas`
.. _`official Python Client`: https://github.com/influxdata/influxdb-python aioinflux-0.9.0/docs/make.bat 0000664 0000000 0000000 00000001455 13511520470 0016056 0 ustar 00root root 0000000 0000000 @ECHO OFF
pushd %~dp0
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build
set SPHINXPROJ=aioinflux
if "%1" == "" goto help
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
goto end
:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
:end
popd
aioinflux-0.9.0/docs/usage.rst 0000664 0000000 0000000 00000061630 13511520470 0016310 0 ustar 00root root 0000000 0000000 User Guide
==========
.. contents::
:local:
TL;DR
-----
This sums up most of what you can do with :mod:`aioinflux`:
.. code:: python
import asyncio
from aioinflux import InfluxDBClient
point = {
'time': '2009-11-10T23:00:00Z',
'measurement': 'cpu_load_short',
'tags': {'host': 'server01',
'region': 'us-west'},
'fields': {'value': 0.64}
}
async def main():
async with InfluxDBClient(db='testdb') as client:
await client.create_database(db='testdb')
await client.write(point)
resp = await client.query('SELECT value FROM cpu_load_short')
print(resp)
asyncio.get_event_loop().run_until_complete(main())
Client modes
------------
Despite the library's name, |client| can also run in non-async
(a.k.a. ``blocking``) mode, which can be useful for debugging and exploratory
data analysis.
The running mode can be switched on-the-fly by changing the ``mode`` attribute:
.. code:: python
client = InfluxDBClient(mode='blocking')
client.mode = 'async'
The ``blocking`` mode is implemented through a decorator that automatically runs coroutines on
the event loop as soon as they are generated.
Usage is almost the same as in the ``async`` mode, but without the need of using ``await`` and
being able to run from outside of a coroutine function:
.. code:: python
client = InfluxDBClient(db='testdb', mode='blocking')
client.ping()
client.write(point)
client.query('SELECT value FROM cpu_load_short')
.. note::
The need for the ``blocking`` mode has been somewhat supplanted
by the new async REPL available with the release of IPython 7.0.
See `this blog post `__ for details.
If you are having issues running ``blocking`` mode with recent Python/IPython versions,
see `this issue `__ for other possible workarounds.
Writing data
------------
To write data to InfluxDB, use |client|'s
|write| method.
Successful writes will return ``True``. If an error occurs, an
:class:`~aioinflux.client.InfluxDBWriteError` exception will be raised.
Input data to |write| can be:
1. A mapping (e.g. ``dict``) containing the keys: ``measurement``, ``time``, ``tags``, ``fields``
2. A :class:`pandas.DataFrame` with a |datetimeindex|
3. A user defined class decorated w/ |lineprotocol|
(**recommended**, see :ref:`below `)
4. A string (``str`` or ``bytes``) properly formatted in InfluxDB's line protocol
5. An iterable of one of the above
Input data in formats 1-3 are serialized into the `line protocol`_ before being written to InfluxDB.
``str`` or ``bytes`` are assumed to already be in line protocol format and are inserted into InfluxDB as they are.
All functionality regarding JSON parsing (InfluxDB's only output format) and serialization to line protocol
(InfluxDB's only input format) is located in the :mod:`~aioinflux.serialization` subpackage.
Beware that serialization is not highly optimized (C extensions / cythonization PRs are welcome!) and may become
a bottleneck depending on your application's performance requirements.
It is, however, reasonably (3-10x) `faster`_ than InfluxDB's `official Python client`_.
.. _`official Python client`: https://github.com/influxdata/influxdb-python
.. _`line protocol`: https://docs.influxdata.com/influxdb/latest/write_protocols/line_protocol_reference/
.. _`faster`: https://gist.github.com/gusutabopb/42550f0f07628ba61b0ed6322f02855b
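Since |write| accepts iterables (format 5 above), multiple points can be batched
into a single call; for example, a short sketch writing two pre-serialized
line protocol strings (format 4):
.. code:: python

    points = [
        'cpu_load_short,host=server01 value=0.12 1422568543702900257',
        'cpu_load_short,host=server02 value=0.55 1422568543702900258',
    ]
    await client.write(points)  # the whole iterable is written in one call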
Writing dictionary-like objects
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. warning::
This is the same format as the one used by InfluxDB's `official Python client`_ and is implemented
in Aioinflux for compatibility purposes only.
Using dictionaries to write data to InfluxDB is slower and more error-prone than the other methods
provided by Aioinflux and therefore **discouraged**.
Aioinflux accepts any dictionary-like object (mapping) as input.
The dictionary must contain the following keys:
1) **measurement**: Optional. Must be a string-like object. If
omitted, must be specified when calling |write|
by passing a ``measurement`` argument.
2) **time**: Optional. The value can be |datetime|,
date-like string (e.g., ``2017-01-01``, ``2009-11-10T23:00:00Z``) or
anything else that can be parsed by :class:`pandas.Timestamp`.
See :ref:`Pandas documentation ` for details.
If Pandas is not available, |ciso8601|_ is used instead for date-like string parsing.
3) **tags**: Optional. This must contain another mapping of tag
keys and values. Both tag keys and values should be strings.
4) **fields**: Mandatory. This must contain another mapping of field
names and values. Field keys should be strings. Field values can be
``float``, ``int``, ``str``, ``bool``, ``None``, or any of their subclasses.
Attempting to use Numpy types will cause errors, as ``np.int64``, ``np.float64``, etc. are not
subclasses of Python's built-in numeric types.
Use dataframes for writing data using Numpy types, or convert scalars as shown in the sketch below.
.. |ciso8601| replace:: ``ciso8601``
.. _ciso8601: https://github.com/closeio/ciso8601/
Any keys other than the above will be ignored when writing data to
InfluxDB.
A typical dictionary-like point would look something like the following:
.. code:: python
{'time': '2009-11-10T23:00:00Z',
'measurement': 'cpu_load_short',
'tags': {'host': 'server01', 'region': 'us-west'},
'fields': {'value1': 0.64, 'value2': True, 'value3': 10}}
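As noted above, Numpy scalar types are rejected; if your values originate from Numpy
and switching to dataframes is not an option, one possible workaround is converting
scalars to built-in types with ``.item()``:
.. code:: python

    import numpy as np

    raw = np.float64(0.64)
    point = {
        'measurement': 'cpu_load_short',
        'tags': {'host': 'server01'},
        'fields': {'value': raw.item()},  # np.float64 -> plain Python float
    }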
.. note:: **Timestamps and timezones**
Working with timezones in computing tends to be quite messy.
To avoid such problems, the `broadly agreed`_ upon idea is to store
timestamps in UTC. This is how both InfluxDB and Pandas treat timestamps internally.
Pandas and many other libraries also assume all input timestamps are in UTC unless otherwise
explicitly noted. Aioinflux does the same and assumes any timezone-unaware |datetime| object
or datetime-like strings is in UTC.
Aioinflux does not raise any warnings when timezone-unaware input is passed
and silently assumes it to be in UTC.
.. _`broadly agreed`: http://lucumr.pocoo.org/2011/7/15/eppur-si-muove/
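For illustration, the sketch below shows the Pandas behavior this convention follows
(it only demonstrates timestamp handling and does not call Aioinflux):
.. code:: python

    import pandas as pd

    naive = pd.Timestamp('2018-01-01')            # timezone-unaware: treated as UTC
    aware = pd.Timestamp('2018-01-01', tz='UTC')  # explicitly UTC
    assert naive.value == aware.value == 1514764800000000000  # same nanosecond epoch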
Writing DataFrames
^^^^^^^^^^^^^^^^^^
Aioinflux also accepts Pandas dataframes as input. The only requirements
for the dataframe is that the index **must** be of type
|datetimeindex|. Also, any column whose ``dtype`` is ``object`` will
be converted to a string representation.
A typical dataframe input should look something like the following:
.. code:: text
LUY BEM AJW tag
2017-06-24 08:45:17.929097+00:00 2.545409 5.173134 5.532397 B
2017-06-24 10:15:17.929097+00:00 -0.306673 -1.132941 -2.130625 E
2017-06-24 11:45:17.929097+00:00 0.894738 -0.561979 -1.487940 B
2017-06-24 13:15:17.929097+00:00 -1.799512 -1.722805 -2.308823 D
2017-06-24 14:45:17.929097+00:00 0.390137 -0.016709 -0.667895 E
The measurement name must be specified with the ``measurement`` argument
when calling |write|.
Columns that should be treated as tags must be specified by passing a sequence as the ``tag_columns`` argument.
Additional tags (not present in the actual dataframe) can also be passed using arbitrary keyword arguments.
**Example:**
.. code:: python
client = InfluxDBClient(db='testdb', mode='blocking')
client.write(df, measurement='prices', tag_columns=['tag'], asset_class='equities')
In the example above, ``df`` is the dataframe we are trying to write to
InfluxDB and ``measurement`` is the measurement we are writing to.
``tag_columns`` is an optional iterable telling which of the
dataframe columns should be parsed as tag values. If ``tag_columns`` is
not explicitly passed, all columns in the dataframe whose ``dtype`` is not
``datetime64`` will be treated as InfluxDB field values.
Any other keyword arguments passed to |write| are
treated as extra tags which will be attached to the data being written
to InfluxDB. Any string which is a valid `InfluxDB identifier`_ and
valid `Python identifier`_ can be used as an extra tag key (with the
exception of the strings ``data``, ``measurement`` and ``tag_columns``).
See :ref:`API reference ` for details.
.. _`InfluxDB identifier`: https://docs.influxdata.com/influxdb/latest/query_language/spec/#identifiers
.. _`Python identifier`: https://docs.python.org/3/reference/lexical_analysis.html#identifiers
Writing user-defined class objects
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. versionchanged:: 0.5.0
Aioinflux can write any arbitrary user-defined class to InfluxDB through the use of the
|lineprotocol| decorator. This decorator monkey-patches an
existing class and adds a ``to_lineprotocol`` method, which is used internally by Aioinflux to serialize
the class data into an InfluxDB-compatible format. In order to generate ``to_lineprotocol``, a typed schema
must be defined using `type hints`_ in the form of type annotations or a schema dictionary.
This is the fastest and least error-prone method of writing data into InfluxDB provided by Aioinflux.
.. _`type hints`: https://docs.python.org/3/library/typing.html
We recommend using |lineprotocol| with :py:class:`~typing.NamedTuple`:
.. code:: python
from aioinflux import *
from typing import NamedTuple
@lineprotocol
class Trade(NamedTuple):
timestamp: TIMEINT
instrument: TAGENUM
source: TAG
side: TAG
price: FLOAT
size: INT
trade_id: STR
Alternatively, the functional form of :py:func:`~collections.namedtuple` can also be used:
.. code:: python
from collections import namedtuple
schema = dict(
timestamp=TIMEINT,
instrument=TAG,
source=TAG,
side=TAG,
price=FLOAT,
size=INT,
trade_id=STR,
)
# Create class
Trade = namedtuple('Trade', schema.keys())
# Monkey-patch existing class and add ``to_lineprotocol``
Trade = lineprotocol(Trade, schema=schema)
Dataclasses (or any other user-defined class) can be used as well:
.. code:: python
from dataclasses import dataclass
@lineprotocol
@dataclass
class Trade:
timestamp: TIMEINT
instrument: TAGENUM
source: TAG
side: TAG
price: FLOAT
size: INT
trade_id: STR
If you want to preserve type annotations for another use,
you can pass your serialization schema as a dictionary as well:
.. code:: python
@lineprotocol(schema=dict(timestamp=TIMEINT, value=FLOAT))
@dataclass
class MyTypedClass:
timestamp: int
value: float
print(MyTypedClass.__annotations__)
# {'timestamp': <class 'int'>, 'value': <class 'float'>}
MyTypedClass(1547710904202826000, 2.1).to_lineprotocol()
# b'MyTypedClass value=2.1 1547710904202826000'
The modified class will have a dynamically generated ``to_lineprotocol`` method which
generates a line protocol representation of the data contained by the object:
.. code:: python
trade = Trade(
timestamp=1540184368785116000,
instrument='AAPL',
source='NASDAQ',
side='BUY',
price=219.23,
size=100,
trade_id='34a1e085-3122-429c-9662-7ce82039d287'
)
trade.to_lineprotocol()
# b'Trade,instrument=AAPL,source=NASDAQ,side=BUY price=219.23,size=100i,trade_id="34a1e085-3122-429c-9662-7ce82039d287" 1540184368785116000'
Calling ``to_lineprotocol`` by the end-user is not necessary but may be useful for debugging.
``to_lineprotocol`` is automatically used by |write| when present.
.. code:: python
client = InfluxDBClient()
await client.write(trade) # True
User-defined class schema/type annotations
""""""""""""""""""""""""""""""""""""""""""
In Aioinflux, InfluxDB types (and derived types) are represented by :py:class:`~typing.TypeVar`
defined in :mod:`aioinflux.serialization.usertype` module.
All schema types (type annotations) **must** be one of those types.
The types available are based on the native types of InfluxDB
(see the `InfluxDB docs `__ for
details), with some extra types to help the serialization to line protocol and/or allow more flexible usage
(such as the use of :py:class:`~enum.Enum` objects).
.. list-table::
:header-rows: 1
:widths: 10 30
:align: center
* - Type
- Description
* - ``MEASUREMENT``
- Optional. If missing, the measurement becomes the class name
* - ``TIMEINT``
- Timestamp is a nanosecond UNIX timestamp
* - ``TIMESTR``
- Timestamp is a datetime string (somewhat compliant to ISO 8601)
* - ``TIMEDT``
- Timestamp is a |datetime| (or subclasses such as :class:`pandas.Timestamp`)
* - ``TAG``
- Treats field as an InfluxDB tag
* - ``TAGENUM``
- Same as ``TAG`` but allows the use of :py:class:`~enum.Enum`
* - ``BOOL``
- Boolean field
* - ``INT``
- Integer field
* - ``FLOAT``
- Float field
* - ``STR``
- String field
* - ``ENUM``
- Same as ``STR`` but allows the use of :py:class:`~enum.Enum`
``TAG*`` types are optional. One and only one ``TIME*`` type must be present. At least one field type must be present.
``@lineprotocol`` options
"""""""""""""""""""""""""
The |lineprotocol| function/decorator provides some options to
customize how object serialization is performed.
See the :ref:`API reference ` for details.
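One such option is ``rm_none`` (also discussed in the performance note below), which makes
the generated ``to_lineprotocol`` skip tags/fields whose value is ``None``; a minimal sketch
of the expected behavior:
.. code:: python

    from typing import NamedTuple
    from aioinflux import *

    @lineprotocol(rm_none=True)
    class Reading(NamedTuple):
        timestamp: TIMEINT
        value: FLOAT
        note: STR = None  # skipped during serialization when None

    Reading(1540184368785116000, 2.1).to_lineprotocol()
    # b'Reading value=2.1 1540184368785116000'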
Performance
"""""""""""
Serialization using |lineprotocol| is about 3x faster
than dictionary-like objects (or about 10x faster than the `official Python client`_).
See this `notebook `__
for a simple benchmark.
Beware that setting ``rm_none=True`` can have substantial performance impact especially when
the number of fields/tags is very large (20+).
Querying data
-------------
Querying data is as simple as passing an InfluxDB query string to |query|:
.. code:: python
await client.query('SELECT myfield FROM mymeasurement')
By default, this returns JSON data:
.. code:: python
{'results': [{'series': [{'columns': ['time', 'Price', 'Volume'],
'name': 'mymeasurement',
'values': [[1491963424224703000, 5783, 100],
[1491963424375146000, 5783, 200],
[1491963428374895000, 5783, 100],
[1491963429645478000, 5783, 1100],
[1491963429655289000, 5783, 100],
[1491963437084443000, 5783, 100],
[1491963442274656000, 5783, 900],
[1491963442274657000, 5782, 5500],
[1491963442274658000, 5781, 3200],
[1491963442314710000, 5782, 100]]}],
'statement_id': 0}]}
See `InfluxDB official docs `_
for more on the InfluxDB's HTTP API specifics.
Output formats
^^^^^^^^^^^^^^
When using |query|, data can be returned in one of the following formats:
1) ``json``: Default. Returns a dictionary representation of the JSON response received from InfluxDB.
2) ``dataframe``: Parses the result into Pandas dataframe(s).
See :ref:`Retrieving DataFrames` for details.
The output format can be switched on-the-fly by changing the ``output`` attribute:
.. code:: python
client = InfluxDBClient(output='dataframe')
client.output = 'json'
Beware that when passing ``chunked=True``, the result type will be an async generator.
See :ref:`Chunked responses` for details.
Retrieving DataFrames
^^^^^^^^^^^^^^^^^^^^^
When the client is in ``dataframe`` mode, |query|
will usually return a :class:`pandas.DataFrame`:
.. code:: text
Price Volume
2017-04-12 02:17:04.224703+00:00 5783 100
2017-04-12 02:17:04.375146+00:00 5783 200
2017-04-12 02:17:08.374895+00:00 5783 100
2017-04-12 02:17:09.645478+00:00 5783 1100
2017-04-12 02:17:09.655289+00:00 5783 100
2017-04-12 02:17:17.084443+00:00 5783 100
2017-04-12 02:17:22.274656+00:00 5783 900
2017-04-12 02:17:22.274657+00:00 5782 5500
2017-04-12 02:17:22.274658+00:00 5781 3200
2017-04-12 02:17:22.314710+00:00 5782 100
.. note::
On statements that return multiple InfluxDB series
(such as a ``GROUP BY "tag"`` query), a dictionary of dataframes will be returned;
multi-statement queries yield a list of such dictionaries.
Aioinflux generates a dataframe for each series contained in the JSON returned by InfluxDB.
See this `Github issue `__ for further discussion.
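For example (mirroring this repository's test suite, with ``df_client`` being a
blocking-mode client using ``output='dataframe'``), a ``GROUP BY`` result can be
handled like this:
.. code:: python

    df_dict = df_client.query('SELECT max(*) FROM m1 GROUP BY "tag"')
    for key, df in df_dict.items():  # keys look like 'm1,tag=A'
        print(key, df.shape)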
When generating dataframes, InfluxDB types are mapped to the following Numpy/Pandas dtypes:
.. list-table::
:header-rows: 1
:align: center
* - InfluxDB type
- Dataframe column ``dtype``
* - Float
- ``float64``
* - Integer
- ``int64``
* - String
- ``object``
* - Boolean
- ``bool``
* - Timestamp
- ``datetime64``
Chunked responses
^^^^^^^^^^^^^^^^^
Aioinflux supports InfluxDB chunked queries. Passing ``chunked=True`` when calling
|query| returns an :py:class:`~collections.abc.AsyncGenerator` object,
which can be asynchronously iterated over.
Using chunked requests allows response processing to be partially done before
the full response is retrieved, reducing overall query time
(at least in theory - your mileage may vary).
.. code:: python
chunks = await client.query("SELECT * FROM mymeasurement", chunked=True)
async for chunk in chunks:
# do something
await process_chunk(...)
When using chunked responses with ``dataframe`` output, the following construct may be useful:
.. code:: python
cursor = await client.query("SELECT * FROM mymeasurement", chunked=True)
df = pd.concat([i async for i in cursor])
If you need to keep track of when the chunks are being returned,
consider setting up a logging handler at ``DEBUG`` level (see :ref:`Debugging` for details).
See the `InfluxDB official docs `__
for more on chunked responses.
Iterating responses
^^^^^^^^^^^^^^^^^^^
By default, |query| returns a parsed JSON response from InfluxDB.
In order to easily iterate over that JSON response point by point, Aioinflux
provides the |iterpoints| function, which returns a generator object:
.. code:: python
from aioinflux import iterpoints
r = await client.query('SELECT * from h2o_quality LIMIT 10')
for i in iterpoints(r):
print(i)
.. code:: text
[1439856000000000000, 41, 'coyote_creek', '1']
[1439856000000000000, 99, 'santa_monica', '2']
[1439856360000000000, 11, 'coyote_creek', '3']
[1439856360000000000, 56, 'santa_monica', '2']
[1439856720000000000, 65, 'santa_monica', '3']
|iterpoints| can also be used with chunked responses:
.. code:: python
chunks = await client.query('SELECT * from h2o_quality', chunked=True)
async for chunk in chunks:
for point in iterpoints(chunk):
# do something
Using custom parsers
""""""""""""""""""""
By default, the generator returned by |iterpoints|
yields a plain list of values without doing any expensive parsing.
However, in case a specific format is needed, an optional ``parser`` argument can be passed.
``parser`` is a function/callable that takes data point values
and, optionally, a ``meta`` keyword parameter: a
dictionary containing all or a subset of the following keys:
``{'columns', 'name', 'tags', 'statement_id'}``.
- Example using a regular function and ``meta``
.. code:: python
r = await client.query('SELECT * from h2o_quality LIMIT 5')
for i in iterpoints(r, lambda *x, meta: dict(zip(meta['columns'], x))):
print(i)
.. code:: text
{'time': 1439856000000000000, 'index': 41, 'location': 'coyote_creek', 'randtag': '1'}
{'time': 1439856000000000000, 'index': 99, 'location': 'santa_monica', 'randtag': '2'}
{'time': 1439856360000000000, 'index': 11, 'location': 'coyote_creek', 'randtag': '3'}
{'time': 1439856360000000000, 'index': 56, 'location': 'santa_monica', 'randtag': '2'}
{'time': 1439856720000000000, 'index': 65, 'location': 'santa_monica', 'randtag': '3'}
- Example using a :py:func:`~collections.namedtuple`
.. code:: python
from collections import namedtuple
nt = namedtuple('MyPoint', ['time', 'index', 'location', 'randtag'])
r = await client.query('SELECT * from h2o_quality LIMIT 5')
for i in iterpoints(r, parser=nt):
print(i)
.. code:: text
MyPoint(time=1439856000000000000, index=41, location='coyote_creek', randtag='1')
MyPoint(time=1439856000000000000, index=99, location='santa_monica', randtag='2')
MyPoint(time=1439856360000000000, index=11, location='coyote_creek', randtag='3')
MyPoint(time=1439856360000000000, index=56, location='santa_monica', randtag='2')
MyPoint(time=1439856720000000000, index=65, location='santa_monica', randtag='3')
Caching query results
^^^^^^^^^^^^^^^^^^^^^
.. versionadded:: v0.7.0
Aioinflux provides an optional caching layer on top of InfluxDB, based on `Redis`_ and :mod:`aioredis`.
The caching functionality is designed for highly iterative/repetitive workloads
(e.g., machine learning / quantitative finance model tuning)
that repeatedly query InfluxDB for the same historical data.
By saving query results in memory locally, load on your InfluxDB instance can be greatly reduced.
In order to enable/use caching functionality:
1. Install the necessary optional dependencies: ``pip install aioinflux[cache]``
2. Pass Redis host information when initializing :class:`.InfluxDBClient` with the ``redis_opts`` argument.
``redis_opts`` takes a dictionary with keyword arguments used when calling :func:`aioredis.create_redis`.
3. When using :meth:`~.InfluxDBClient.query`, set ``use_cache`` to ``True``.
Even when Redis is properly configured, the cache will be ignored unless ``use_cache=True`` is passed on a per-query basis.
Optionally, to control when the cache expires, use the ``cache_expiry`` argument of :class:`.InfluxDBClient`.
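Putting it together, a minimal sketch based on this repository's test fixtures:
.. code:: python

    client = InfluxDBClient(
        db='mydb',
        redis_opts=dict(address='redis://localhost:6379/8', timeout=5),
        cache_expiry=600,
    )
    q = 'SELECT bar FROM foo'
    r1 = await client.query(q, use_cache=True)   # cache miss: hits InfluxDB, populates Redis
    r2 = await client.query(q, use_cache=True)   # cache hit: served from Redis
    r3 = await client.query(q, use_cache=False)  # cache ignored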
You can also simply use the Redis CLI to clear the cache:
.. code:: bash
redis-cli -n <db_number> flushdb
In order to check whether the cache is being used, and whether queries are hitting or missing it,
enable the ``aioinflux`` logger and set it to ``DEBUG`` level. See :ref:`Debugging` for more details.
.. _Redis: https://redis.io/
Other functionality
-------------------
Authentication
^^^^^^^^^^^^^^
Aioinflux supports basic HTTP authentication provided by :py:class:`aiohttp.BasicAuth`.
Simply pass ``username`` and ``password`` when instantiating |client|:
.. code:: python
client = InfluxDBClient(username='user', password='pass')
Unix domain sockets
^^^^^^^^^^^^^^^^^^^
If your InfluxDB server uses UNIX domain sockets you can use ``unix_socket``
when instantiating |client|:
.. code:: python
client = InfluxDBClient(unix_socket='/path/to/socket')
See |unix_connector|_ for details.
.. |unix_connector| replace:: ``aiohttp.UnixConnector``
.. _unix_connector: https://docs.aiohttp.org/en/stable/client_reference.html#aiohttp.UnixConnector
Custom timeouts
^^^^^^^^^^^^^^^
.. todo:: TODO
Other ``aiohttp`` functionality
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. todo:: Explain how to customize :class:`aiohttp.ClientSession` creation
HTTPS/SSL
^^^^^^^^^
Aioinflux/InfluxDB uses HTTP by default, but HTTPS can be used by passing ``ssl=True``
when instantiating |client|.
If you are accessing your InfluxDB instance over the public internet, setting up HTTPS is
`strongly recommended `__.
.. code:: python
client = InfluxDBClient(host='my.host.io', ssl=True)
Database selection
^^^^^^^^^^^^^^^^^^
After the instantiation of the |client| object, database
can be switched by changing the ``db`` attribute:
.. code:: python
client = InfluxDBClient(db='db1')
client.db = 'db2'
Beware that, unlike some NoSQL databases (such as MongoDB),
InfluxDB requires that a database be explicitly created (by using the
|CREATE_DATABASE|_ query) before doing any operations on it.
.. |CREATE_DATABASE| replace:: ``CREATE DATABASE``
.. _`CREATE_DATABASE`: https://docs.influxdata.com/influxdb/latest/query_language/database_management/#create-database
Debugging
^^^^^^^^^
If you are having problems while using Aioinflux, enabling logging might be useful.
Below is a simple way to setup logging from your application:
.. code:: python
import logging
logging.basicConfig()
logging.getLogger('aioinflux').setLevel(logging.DEBUG)
For further information about logging, please refer to the
`official documentation `__.
.. |lineprotocol| replace:: :func:`~aioinflux.serialization.usertype.lineprotocol`
.. |client| replace:: :class:`~aioinflux.client.InfluxDBClient`
.. |write| replace:: :meth:`~aioinflux.client.InfluxDBClient.write`
.. |query| replace:: :meth:`~aioinflux.client.InfluxDBClient.query`
.. |iterpoints| replace:: :func:`~aioinflux.iterutils.iterpoints`
.. |datetimeindex| replace:: :class:`~pandas.DatetimeIndex`
.. |datetime| replace:: :py:class:`datetime.datetime`
aioinflux-0.9.0/notebooks/ 0000775 0000000 0000000 00000000000 13511520470 0015517 5 ustar 00root root 0000000 0000000 aioinflux-0.9.0/notebooks/datapoint_benchmark.ipynb 0000664 0000000 0000000 00000006633 13511520470 0022567 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"from aioinflux.serialization import serialize\n",
"from aioinflux.serialization.datapoint import datapoint, InfluxType"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"@datapoint\n",
"class Trade:\n",
" timestamp: InfluxType.TIMEINT\n",
" instrument: InfluxType.TAG\n",
" source: InfluxType.TAG\n",
" side: InfluxType.TAG\n",
" price: InfluxType.FLOAT\n",
" size: InfluxType.INT\n",
" trade_id: InfluxType.STR"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"trade = Trade(\n",
" timestamp=1540184368785116000,\n",
" instrument='AAPL',\n",
" source='NASDAQ',\n",
" side='BUY',\n",
" price=219.23,\n",
" size=100,\n",
" trade_id='34a1e085-3122-429c-9662-7ce82039d287'\n",
")"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"trade_dict = {\n",
" 'time': 1540184368785116000,\n",
" 'measurement': 'Trade',\n",
" 'tags': {'instrument': 'AAPL', 'source': 'NASDAQ', 'side': 'BUY'},\n",
" 'fields': {'price': 219.23, 'size': 100,\n",
" 'trade_id': '34a1e085-3122-429c-9662-7ce82039d287'}\n",
"}"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"b'Trade,instrument=AAPL,source=NASDAQ,side=BUY size=100i,price=219.23,trade_id=\"34a1e085-3122-429c-9662-7ce82039d287\" 1540184368785116000'"
]
},
"execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"trade.to_lineprotocol()"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"b'Trade,instrument=AAPL,source=NASDAQ,side=BUY price=219.23,size=100i,trade_id=\"34a1e085-3122-429c-9662-7ce82039d287\" 1540184368785116000'"
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"serialize(trade_dict)"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"3.35 µs ± 53.7 ns per loop (mean ± std. dev. of 7 runs, 100000 loops each)\n"
]
}
],
"source": [
"%timeit trade.to_lineprotocol()"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"10.7 µs ± 100 ns per loop (mean ± std. dev. of 7 runs, 100000 loops each)\n"
]
}
],
"source": [
"%timeit serialize(trade_dict)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.0"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
aioinflux-0.9.0/pytest.ini 0000664 0000000 0000000 00000000213 13511520470 0015541 0 ustar 00root root 0000000 0000000 [pytest]
log_cli=true
log_level=DEBUG
log_format = %(asctime)s | %(name)s | %(levelname)s: %(message)s
log_date_format = %Y-%m-%d %H:%M:%S
aioinflux-0.9.0/setup.cfg 0000664 0000000 0000000 00000000142 13511520470 0015332 0 ustar 00root root 0000000 0000000 [flake8]
ignore = F401,F403,F405,F841,W503,W504,RST304,D
max-line-length = 96
max-complexity = 16
aioinflux-0.9.0/setup.py 0000664 0000000 0000000 00000003400 13511520470 0015223 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python
import sys
from setuptools import setup
from pathlib import Path
with open('README.rst', 'r') as f:
long_description = f.read()
meta = {}
with open(Path(__file__).parent / 'aioinflux' / '__init__.py') as f:
exec('\n'.join(l for l in f if l.startswith('__')), meta)
test_requirements = [
'pytest',
'pytest-asyncio',
'pytest-cov',
'pyyaml',
'pytz',
'flake8',
'pep8-naming',
# 'flake8-docstrings',
'flake8-rst-docstrings',
'pygments',
]
if sys.version_info[:2] == (3, 6):
test_requirements.append('dataclasses')
setup(name='aioinflux',
version=meta['__version__'],
description='Asynchronous Python client for InfluxDB',
long_description=long_description,
author='Gustavo Bezerra',
author_email='gusutabopb@gmail.com',
url='https://github.com/gusutabopb/aioinflux',
packages=[
'aioinflux',
'aioinflux.serialization',
],
include_package_data=True,
python_requires='>=3.6',
install_requires=['aiohttp>=3.0', 'ciso8601'],
extras_require={
'test': test_requirements,
'docs': [
'docutils',
'sphinx',
'sphinx_rtd_theme',
'sphinx-autodoc-typehints',
],
'pandas': [
'pandas>=0.21',
'numpy'
],
'cache': [
'aioredis>=1.2.0',
'lz4>=2.1.0',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Database',
])
aioinflux-0.9.0/tests/ 0000775 0000000 0000000 00000000000 13511520470 0014656 5 ustar 00root root 0000000 0000000 aioinflux-0.9.0/tests/conftest.py 0000664 0000000 0000000 00000002702 13511520470 0017056 0 ustar 00root root 0000000 0000000 import asyncio
import pytest
from aioinflux import InfluxDBClient
import testing_utils as utils
@pytest.yield_fixture(scope='module')
def event_loop():
loop = asyncio.new_event_loop()
yield loop
loop.close()
@pytest.fixture(scope='module')
async def client():
async with InfluxDBClient(db='client_test', mode='async') as client:
await client.create_database()
yield client
await client.drop_database()
@pytest.fixture(scope='module')
async def cache_client():
opts = dict(
db='cache_client_test',
redis_opts=dict(
address='redis://localhost:6379/8',
timeout=5,
),
cache_expiry=600
)
async with InfluxDBClient(**opts, mode='async') as client:
assert await client.create_database()
yield client
await client.drop_database()
await client._redis.flushdb()
@pytest.fixture(scope='module')
def df_client():
if utils.pd is None:
return
with InfluxDBClient(db='df_client_test', mode='blocking', output='dataframe') as client:
client.create_database()
yield client
client.drop_database()
@pytest.fixture(scope='module')
async def iter_client():
async with InfluxDBClient(db='iter_client_test', mode='async') as client:
await client.create_database()
await client.write([p for p in utils.cpu_load_generator(100)])
yield client
await client.drop_database()
aioinflux-0.9.0/tests/test_cache.py 0000664 0000000 0000000 00000001240 13511520470 0017327 0 ustar 00root root 0000000 0000000 import pytest
from testing_utils import logger, requires_redis
@requires_redis
@pytest.mark.asyncio
async def test_cache(cache_client):
assert await cache_client.write('foo bar=1')
q = 'SELECT bar from foo'
r1 = await cache_client.query(q, use_cache=True) # Create cache
logger.debug(r1)
assert await cache_client._redis.exists(f'aioinflux:{q}')
r2 = await cache_client.query(q, use_cache=True) # Get cache
logger.debug(r2)
r3 = await cache_client.query(q, use_cache=False) # Ignore cache
r1 = r1['results'][0]['series'][0]
r2 = r2['results'][0]['series'][0]
r3 = r3['results'][0]['series'][0]
assert r1 == r2 == r3
aioinflux-0.9.0/tests/test_client.py 0000664 0000000 0000000 00000020357 13511520470 0017554 0 ustar 00root root 0000000 0000000 import pytest
from aioinflux import InfluxDBClient, InfluxDBError, InfluxDBWriteError, iterpoints
from aioinflux.compat import pd
import testing_utils as utils
from testing_utils import logger
def test_repr(client):
logger.info(client)
@pytest.mark.asyncio
async def test_ping(client):
r = await client.ping()
assert 'X-Influxdb-Version' in r
#########
# Write #
#########
@pytest.mark.asyncio
async def test_write_simple(client):
assert await client.write(utils.random_points(100))
@pytest.mark.asyncio
async def test_write_string(client):
point = 'cpu_load_short,host=server02,region=us-west value=0.55 1422568543702900257'
assert await client.write(point)
@pytest.mark.asyncio
async def test_write_tagless(client):
point = b'cpu_load_short value=0.55 1423568543000000000'
assert await client.write(point)
@pytest.mark.asyncio
async def test_write_special_values(client):
point = utils.random_point()
point['tags']['boolean_tag'] = True
point['tags']['none_tag'] = None
point['tags']['blank_tag'] = ''
point['fields']['boolean_field'] = False
point['fields']['none_field'] = None
point['fields']['backslash'] = "This is a backslash: \\"
point['measurement'] = '"quo⚡️es and emoji"'
with pytest.warns(UserWarning) as e:
assert await client.write(point)
logger.warning(e)
@pytest.mark.asyncio
async def test_write_with_custom_measurement(client):
points = [p for p in utils.random_points(5)]
for p in points:
_ = p.pop('measurement')
logger.info(points)
with pytest.raises(ValueError):
assert await client.write(points)
assert await client.write(points, measurement='another_measurement')
resp = await client.query('SELECT * FROM another_measurement')
assert len(resp['results'][0]['series'][0]['values']) == 5
@pytest.mark.asyncio
async def test_write_without_timestamp(client):
points = [p for p in utils.random_points(9)]
for p in points:
_ = p.pop('time')
_ = p.pop('measurement')
logger.info(points)
assert await client.write(points, measurement='yet_another_measurement')
resp = await client.query('SELECT * FROM yet_another_measurement')
# Points with the same tag/timestamp set are overwritten
assert len(resp['results'][0]['series'][0]['values']) == 1
@pytest.mark.asyncio
async def test_write_non_string_identifier_and_tags(client):
point = dict(tags={1: 2},
fields={3: 4})
with pytest.warns(UserWarning):
assert await client.write(point, measurement='my_measurement')
resp = await client.query('SELECT * FROM my_measurement')
logger.info(resp)
assert len(resp['results'][0]['series'][0]['values']) == 1
@pytest.mark.asyncio
async def test_write_to_non_default_db(client):
points = [p for p in utils.random_points(5)]
await client.create_database(db='temp_db')
assert client.db != 'temp_db'
assert await client.write(points, db='temp_db')
resp = await client.query('SELECT * FROM temp_db..test_measurement')
logger.info(resp)
assert len(resp['results'][0]['series'][0]['values']) == 5
await client.drop_database(db='temp_db')
@pytest.mark.asyncio
async def test_write_to_non_default_rp(client):
db = client.db
await client.query(f"CREATE RETENTION POLICY myrp ON {db} DURATION 1h REPLICATION 1")
points = [p for p in utils.random_points(5)]
assert await client.write(points, rp='myrp')
resp = await client.query(f"SELECT * from {db}.myrp.test_measurement")
logger.info(resp)
assert len(resp['results'][0]['series'][0]['values']) == 5
#########
# Query #
#########
@pytest.mark.asyncio
async def test_simple_query(client):
resp = await client.query('SELECT * FROM test_measurement')
assert len(resp['results'][0]['series'][0]['values']) == 100
@pytest.mark.asyncio
async def test_chunked_query(client):
resp = await client.query('SELECT * FROM test_measurement',
chunked=True, chunk_size=10)
points = []
async for chunk in resp:
for point in iterpoints(chunk):
points.append(point)
assert len(points) == 100
@pytest.mark.asyncio
async def test_empty_chunked_query(client):
resp = await client.query('SELECT * FROM fake', chunked=True, chunk_size=10)
points = []
async for chunk in resp:
for point in iterpoints(chunk):
points.append(point)
assert len(points) == 0
####################
# Built-in queries #
####################
@pytest.mark.asyncio
async def test_create_database(client):
resp = await client.create_database(db='mytestdb')
assert resp
@pytest.mark.asyncio
async def test_drop_database(client):
resp = await client.drop_database(db='mytestdb')
assert resp
@pytest.mark.asyncio
async def test_drop_measurement(client):
measurement = utils.random_string()
assert await client.write(f'{measurement} foo=1')
await client.drop_measurement(measurement=measurement)
@pytest.mark.asyncio
async def test_show_databases(client):
r = await client.show_databases()
assert r
logger.debug(r)
@pytest.mark.asyncio
async def test_show_measurements(client):
r = await client.show_measurements()
assert r
logger.debug(r)
@pytest.mark.asyncio
async def test_show_users(client):
r = await client.show_users()
assert r
logger.debug(r)
@pytest.mark.asyncio
async def test_show_series(client):
r = await client.show_series()
assert r
logger.debug(r)
r = await client.show_series('cpu_load_short')
assert r
logger.debug(r)
@pytest.mark.asyncio
async def test_show_retention_policies(client):
r = await client.show_retention_policies()
assert r
logger.debug(r)
@pytest.mark.asyncio
async def test_show_tag_keys(client):
r = await client.show_tag_keys()
assert r
logger.debug(r)
r = await client.show_tag_keys('cpu_load_short')
assert r
logger.debug(r)
@pytest.mark.asyncio
async def test_show_field_keys(client):
r = await client.show_field_keys()
assert r
logger.debug(r)
r = await client.show_field_keys('cpu_load_short')
assert r
logger.debug(r)
@pytest.mark.asyncio
async def test_show_tag_values(client):
r = await client.show_tag_values('host')
assert r
logger.debug(r)
r = await client.show_tag_values('host', 'cpu_load_short')
assert r
logger.debug(r)
@pytest.mark.asyncio
async def test_show_continuous_queries(client):
r = await client.show_continuous_queries()
assert r
logger.debug(r)
###############
# Error tests #
###############
@pytest.mark.asyncio
async def test_chunked_query_error(client):
with pytest.raises(InfluxDBError) as e:
resp = await client.query('INVALID QUERY', chunked=True, chunk_size=10)
_ = [i async for i in resp]
logger.error(e)
@pytest.mark.asyncio
async def test_invalid_data_write(client):
with pytest.raises(InfluxDBWriteError) as e:
# Plain invalid data
await client.write(utils.random_string())
logger.error(e)
with pytest.raises(ValueError) as e:
# Pass function as input data
await client.write(utils.random_string)
logger.error(e)
with pytest.raises(ValueError) as e:
# Measurement missing
point = utils.random_point()
point.pop('measurement')
await client.write(point)
logger.error(e)
def test_invalid_client_mode():
    with pytest.raises(ValueError) as e:
        _ = InfluxDBClient(db='mytestdb', mode=utils.random_string())
    logger.error(e)

def test_no_default_database_warning():
    with pytest.warns(UserWarning) as e:
        _ = InfluxDBClient(db=None)
    logger.error(e)

def test_invalid_output_format(client):
    with pytest.raises(ValueError) as e:
        client.output = utils.random_string()
    logger.error(e)
    if pd is None:
        with pytest.raises(ValueError) as e:
            client.output = 'dataframe'
        logger.error(e)

@pytest.mark.asyncio
async def test_invalid_query(client):
    with pytest.raises(InfluxDBError) as e:
        await client.query('NOT A VALID QUERY')
    logger.error(e)

@pytest.mark.asyncio
async def test_statement_error(client):
    with pytest.raises(InfluxDBError) as e:
        await client.query('SELECT * FROM my_measurement', db='fake_db')
    logger.error(e)

aioinflux-0.9.0/tests/test_dataframe.py 0000664 0000000 0000000 00000007475 13511520470 0020230 0 ustar 00root root 0000000 0000000 import pytest
import testing_utils as utils
from testing_utils import logger
from aioinflux.compat import pd, np
if pd is not None:
    pd.set_option('display.max_columns', 10)
    pd.set_option('display.width', 100)

@utils.requires_pandas
def test_write_dataframe(df_client):
    df1 = utils.random_dataframe()
    df2 = utils.random_dataframe()
    df2.columns = df1.columns
    assert df_client.write(df1, measurement='m1', mytag='foo', tag_columns=['tag'])
    assert df_client.write(df2, measurement='m2', mytag='foo', tag_columns=['tag'])
    assert df_client.write(utils.random_dataframe(), measurement='m3')  # tag-less

@utils.requires_pandas
def test_write_dataframe_with_nan(df_client):
    df = utils.trading_df()
    df_client.write(df, 'fills00')
    for i in range(10):
        for _ in range(int(len(df) / 5)):
            # Punch random NaN holes; use row/col so the outer loop variable
            # ``i`` (which names the measurement) is not clobbered.
            row = np.random.randint(df.shape[0])
            col = np.random.randint(df.shape[1])
            df.iloc[row, col] = np.nan
        df_client.write(df, f'fills{i + 1:02d}')

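# The serializer is expected to drop NaN-valued fields point by point, so
# each 'fillsNN' measurement should be written cleanly despite the holes
# punched into the frame above.
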
@utils.requires_pandas
def test_select_into(df_client):
df_client.query("SELECT * INTO m2_copy from m2")
df = df_client.query('SELECT * from m2_copy')
assert df.shape == (50, 8)
logger.info(f'\n{df.head()}')
@utils.requires_pandas
def test_read_dataframe(df_client):
    df = df_client.query('SELECT * from m1')
    logger.info(f'\n{df.head()}')
    assert df.shape == (50, 8)

@utils.requires_pandas
@pytest.mark.asyncio
async def test_dataframe_chunked_query(client):
    client.output = 'dataframe'
    df1 = utils.random_dataframe()
    await client.write(df1, measurement='m3')
    cursor = await client.query('SELECT * FROM m3', chunked=True, chunk_size=10)
    dfs = []
    async for subdf in cursor:
        assert isinstance(subdf, pd.DataFrame)
        assert len(subdf) == 10
        dfs.append(subdf)
    df = pd.concat(dfs)
    assert df.shape == (50, 7)
    client.output = 'json'

@utils.requires_pandas
def test_read_dataframe_groupby(df_client):
    df_dict = df_client.query('SELECT max(*) from /m[1-2]$/ GROUP BY "tag"')
    s = ['\n{}:\n{}'.format(k, v) for k, v in df_dict.items()]
    logger.info('\n'.join(s))
    m1 = pd.concat([df for k, df in df_dict.items() if k.split(',')[0] == 'm1'])
    m2 = pd.concat([df for k, df in df_dict.items() if k.split(',')[0] == 'm2'])
    assert m1.shape == (5, 6)
    assert m2.shape == (5, 6)

@utils.requires_pandas
def test_read_dataframe_multistatement(df_client):
    df_list = df_client.query('SELECT max(*) from m1;SELECT min(*) from m2')
    logger.info(df_list)
    assert type(df_list) is list
    assert 'm1' in df_list[0]
    assert 'm2' in df_list[1]
    assert df_list[0]['m1'].shape == (1, 5)
    assert df_list[1]['m2'].shape == (1, 5)

@utils.requires_pandas
def test_read_dataframe_show_databases(df_client):
    df = df_client.show_databases()
    assert isinstance(df.index, pd.RangeIndex)
    assert 'name' in df.columns
    logger.info(f'\n{df.head()}')

@utils.requires_pandas
@pytest.mark.asyncio
async def test_change_db(client):
    state = client.db, client.output
    client.output = 'dataframe'
    client.db = 'foo'
    await client.ping()
    client.db, client.output = state

###############
# Error tests #
###############
@utils.requires_pandas
@pytest.mark.asyncio
async def test_invalid_data_write_dataframe(client):
    with pytest.raises(ValueError) as e:
        # Non-DatetimeIndex DataFrame
        await client.write(utils.random_dataframe().reset_index(), measurement='foo')
    logger.error(e)
    with pytest.raises(ValueError) as e:
        # DataFrame write without specifying measurement
        await client.write(utils.random_dataframe())
    logger.error(e)

@utils.requires_pandas
def test_chunked_dataframe(df_client):
    with pytest.raises(ValueError) as e:
        _ = df_client.query('SELECT * FROM foo', chunked=True)
    logger.error(e)

aioinflux-0.9.0/tests/test_iter.py 0000664 0000000 0000000 00000002165 13511520470 0017236 0 ustar 00root root 0000000 0000000 import pytest
from aioinflux import iterpoints
from testing_utils import logger
@pytest.mark.asyncio
async def test_iterpoints_with_parser(iter_client):
r = await iter_client.query("SELECT * FROM cpu_load LIMIT 3")
parser = lambda *x, meta: dict(zip(meta['columns'], x)) # noqa
for i in iterpoints(r, parser):
logger.info(i)
assert 'time' in i
assert 'value' in i
assert 'host' in i
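# A parser passed to iterpoints() receives each row's values as positional
# arguments; a parser that also declares a ``meta`` keyword (like the lambda
# above) additionally gets the series metadata, including 'columns'.
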
@pytest.mark.asyncio
async def test_aiter_point(iter_client):
    resp = await iter_client.query('SELECT * from cpu_load', chunked=True, chunk_size=10)
    points = []
    async for chunk in resp:
        for point in iterpoints(chunk):
            points.append(point)
    assert len(points) == 100

@pytest.mark.asyncio
async def test_iter_point_namedtuple(iter_client):
    from collections import namedtuple
    nt = namedtuple('cpu_load', ['time', 'direction', 'host', 'region', 'value'])
    resp = await iter_client.query('SELECT * from cpu_load')
    points = []
    for point in iterpoints(resp, parser=nt):
        points.append(point)
    assert len(point) == 5  # each parsed point is a 5-field namedtuple
    assert len(points) == 100

aioinflux-0.9.0/tests/test_serialization.py 0000664 0000000 0000000 00000001556 13511520470 0021153 0 ustar 00root root 0000000 0000000 from datetime import datetime
import pytz
import pytest
from aioinflux.serialization.mapping import _serialize_timestamp
from testing_utils import logger
from aioinflux.compat import pd
def test_timestamp_timezone_parsing():
    dt_naive = datetime(2018, 1, 1)
    dt_aware = datetime(2018, 1, 1, tzinfo=pytz.UTC)
    str_naive = str(dt_naive)
    str_aware = str(dt_aware)
    for i in [dt_naive, dt_aware, str_naive, str_aware]:
        assert _serialize_timestamp({'time': i}) == 1514764800000000000

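# 1514764800000000000 is 2018-01-01T00:00:00Z in nanoseconds since the epoch;
# the test above shows that naive datetimes/strings are treated as UTC, so
# all four inputs serialize to the same value.
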
@pytest.mark.skipif(pd is not None, reason='ciso8601-specific test')
def test_invalid_timestamp_parsing():
    with pytest.raises(ValueError) as e:
        _serialize_timestamp({'time': '2018/01/01'})
    logger.error(e)

def test_invalid_timestamp_parsing2():
    with pytest.raises(ValueError) as e:
        _serialize_timestamp({'time': 'foo'})
    logger.error(e)

aioinflux-0.9.0/tests/test_usertype.py 0000664 0000000 0000000 00000011606 13511520470 0020153 0 ustar 00root root 0000000 0000000 # flake8: noqa
import uuid
import enum
from datetime import datetime
from typing import NamedTuple, Optional
from collections import namedtuple
from dataclasses import dataclass
from decimal import Decimal
import pytest
import aioinflux
from aioinflux import lineprotocol, SchemaError
from testing_utils import logger
class CpuLoad(enum.Enum):
    LOW = 10
    HIGH = 100

@pytest.mark.asyncio
async def test_decorator(client):
    @lineprotocol
    class MyPoint(NamedTuple):
        measurement: aioinflux.MEASUREMENT
        time: aioinflux.TIMEINT
        host: aioinflux.TAG
        running: aioinflux.BOOL
        users: aioinflux.INT
        cpu_load: aioinflux.FLOAT
        cpu_load_level: aioinflux.ENUM
        cpu_load_level_tag: aioinflux.TAGENUM
        running_cost: aioinflux.DECIMAL
        uuid: aioinflux.STR

    p = MyPoint(
        measurement="dp",
        time=1500,
        host="us1",
        running=True,
        users=1000,
        cpu_load=99.5,
        cpu_load_level=CpuLoad.HIGH,
        cpu_load_level_tag=CpuLoad.LOW,
        running_cost=Decimal('3.54'),
        uuid=str(uuid.uuid4()),
    )
    assert p
    assert hasattr(p, 'to_lineprotocol')
    assert await client.write(p)
    logger.info(await client.query('SELECT * FROM dp'))
    logger.info(await client.query("SHOW FIELD KEYS FROM dp"))

def test_functional():
    schema = dict(
        measurement=aioinflux.MEASUREMENT,
        time=aioinflux.TIMEINT,
        host=aioinflux.TAG,
        running=aioinflux.BOOL,
        users=aioinflux.INT,
    )
    MyPoint = lineprotocol(namedtuple('MyPoint', schema.keys()), schema=schema)
    p = MyPoint("a", 2, "b", False, 5)
    logger.debug(p.to_lineprotocol())
    assert isinstance(p.to_lineprotocol(), bytes)

def test_datestr():
    schema = dict(
        measurement=aioinflux.MEASUREMENT,
        time=aioinflux.TIMESTR,
        host=aioinflux.TAG,
        running=aioinflux.BOOL,
        users=aioinflux.INT,
    )
    MyPoint = lineprotocol(namedtuple('MyPoint', schema.keys()), schema=schema)
    p = MyPoint("a", "2018-08-08 15:22:33", "b", False, 5)
    logger.debug(p.to_lineprotocol())
    assert isinstance(p.to_lineprotocol(), bytes)

def test_datetime():
    schema = dict(
        measurement=aioinflux.MEASUREMENT,
        time=aioinflux.TIMEDT,
        host=aioinflux.TAG,
        running=aioinflux.BOOL,
        users=aioinflux.INT,
    )
    MyPoint = lineprotocol(namedtuple('MyPoint', schema.keys()), schema=schema)
    p = MyPoint("a", datetime.utcnow(), "b", False, 5)
    logger.debug(p.to_lineprotocol())
    assert isinstance(p.to_lineprotocol(), bytes)

def test_placeholder():
    @lineprotocol(placeholder=True)
    @dataclass
    class MyPoint:
        timestamp: aioinflux.TIMEINT

    lp = MyPoint(0).to_lineprotocol()
    logger.debug(lp)

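# placeholder=True is expected to inject a dummy field into the output, so
# a schema consisting of nothing but a timestamp still yields valid line
# protocol (a point needs at least one field).
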
def test_extra_tags():
    @lineprotocol(extra_tags={'host': 'ap1'})
    class MyPoint(NamedTuple):
        measurement: aioinflux.MEASUREMENT
        time: aioinflux.TIMEINT
        running: aioinflux.BOOL
        users: aioinflux.INT

    p = MyPoint("a", 2, False, 5)
    assert b'ap1' in p.to_lineprotocol()

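# extra_tags are merged into every point the class serializes, which is why
# b'ap1' appears even though MyPoint itself has no host attribute.
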
def test_rm_none():
    @lineprotocol(rm_none=True)
    class MyPoint(NamedTuple):
        measurement: aioinflux.MEASUREMENT
        time: aioinflux.TIMEINT
        host: aioinflux.TAG
        running: Optional[aioinflux.BOOL]
        users: Optional[aioinflux.INT]

    p1 = MyPoint("a", 2, "b", True, None)
    p2 = MyPoint("a", 2, "b", None, 1)
    logger.debug(p1.to_lineprotocol())
    logger.debug(p2.to_lineprotocol())
    assert b'users' not in p1.to_lineprotocol()
    assert b'running' not in p2.to_lineprotocol()

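# With rm_none=True, Optional fields that are None are simply omitted from
# the serialized point; without it, a None value would produce broken line
# protocol.
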
# noinspection PyUnusedLocal
def test_schema_error():
    # No annotations at all -> nothing to build a schema from
    with pytest.raises(SchemaError):
        @lineprotocol
        class MyPoint:
            pass

    # 'running' uses plain ``bool`` instead of an aioinflux annotation
    with pytest.raises(SchemaError):
        @lineprotocol  # noqa: F811
        class MyPoint(NamedTuple):
            measurement: aioinflux.MEASUREMENT
            time: aioinflux.TIMEINT
            host: aioinflux.TAG
            running: bool
            users: aioinflux.INT

    # Two MEASUREMENT attributes are ambiguous
    with pytest.raises(SchemaError):
        @lineprotocol  # noqa: F811
        class MyPoint(NamedTuple):
            measurement: aioinflux.MEASUREMENT
            measurement2: aioinflux.MEASUREMENT
            time: aioinflux.TIMEINT
            host: aioinflux.TAG
            running: aioinflux.BOOL
            users: aioinflux.INT

    # Two timestamp attributes (TIMEINT and TIMEDT) are ambiguous
    with pytest.raises(SchemaError):
        @lineprotocol  # noqa: F811
        class MyPoint(NamedTuple):
            measurement: aioinflux.MEASUREMENT
            time: aioinflux.TIMEINT
            time2: aioinflux.TIMEDT
            host: aioinflux.TAG
            running: aioinflux.BOOL
            users: aioinflux.INT

    # Tags only -- line protocol requires at least one field
    with pytest.raises(SchemaError):
        @lineprotocol  # noqa: F811
        class MyPoint(NamedTuple):
            measurement: aioinflux.MEASUREMENT
            time: aioinflux.TIMEINT
            host: aioinflux.TAG

aioinflux-0.9.0/tests/testing_utils.py 0000664 0000000 0000000 00000005044 13511520470 0020130 0 ustar 00root root 0000000 0000000 import datetime
import logging
import random
import string
import uuid
from itertools import combinations, cycle, islice
from aioinflux.compat import *
import pytest
requires_pandas = pytest.mark.skipif(pd is None, reason=no_pandas_warning)
requires_redis = pytest.mark.skipif(aioredis is None, reason=no_redis_warning)
logger = logging.getLogger('aioinflux')
def random_point():
    now = datetime.datetime.now()
    point = {
        'measurement': 'test_measurement',  # noqa
        'tags': {'tag key with sp🚀ces': 'tag,value,with"commas"'},
        'time': random.choice([now, str(now)]),
        'fields': {
            'fi\neld_k\ey': random.randint(0, 200),
            'quote': '"',
            'value': random.random(),
        }
    }
    return point

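# The deliberately awkward keys and values above (spaces, an emoji, embedded
# newlines, quotes, commas) exercise the line-protocol escaping rules.
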
def random_points(n=10):
    for i in range(n):
        yield random_point()

def random_dataframe():
"""Generates a DataFrame with five random walk columns and a tag column"""
arr = np.cumsum(np.random.randn(50, 5), axis=1)
letters = combinations(string.ascii_uppercase, 3)
columns = [''.join(triplet) for triplet in random.choices(list(letters), k=5)]
tags = [chr(i + 65) for i in np.random.randint(0, 5, 50)]
ix = pd.date_range(end=pd.Timestamp.utcnow(), periods=50, freq='90min')
df = pd.DataFrame(arr, columns=columns)
df['tag'] = tags
df['noise'] = list(islice(cycle(["a", '\n', r"\n"]), 50))
df.index = ix
return df
def trading_df(n=100):
    sym = [''.join(i) for i in combinations('ABCDE', 3)]
    df = pd.DataFrame({
        'str_id': [str(uuid.uuid4()) for _ in range(n)],
        'px': 1000 + np.cumsum(np.random.randint(-10, 11, n)) / 2,
        'sym': np.random.choice(sym, n),
        'side': np.random.choice(['BUY', 'SELL'], n),
        'size': np.random.randint(1, 10, size=n) * 100,
        'valid': np.random.randint(2, size=n).astype('bool')
    })
    df.index = pd.date_range(end=pd.Timestamp.utcnow(), periods=n, freq='1s')
    df['side'] = df['side'].astype('category')
    return df

def random_string():
    return ''.join(random.choices(string.ascii_lowercase, k=random.randint(4, 10)))

def cpu_load_generator(n):
    """Yield ``n`` random cpu_load points, already in line-protocol form."""
    p = 'cpu_load,direction={d},host=server{s:02d},region=us-{r} value={f:.5f} {t}'
    t = 1520535379386016000
    d = ['in', 'out']
    r = ['north', 'south', 'west', 'east']
    for _ in range(n):
        t += random.randint(1, 10 ** 10)
        yield p.format(
            t=t,
            d=random.choice(d),
            r=random.choice(r),
            s=random.randint(1, 99),
            f=random.random() * 10,
        )
aioinflux-0.9.0/tox.ini 0000664 0000000 0000000 00000000000 13511520470 0015015 0 ustar 00root root 0000000 0000000