influxdb-2.12.0/0000755000175000017500000000000012652700261014200 5ustar reazemreazem00000000000000influxdb-2.12.0/dev-requirements.txt0000644000175000017500000000010512652700251020233 0ustar reazemreazem00000000000000requests nose mock pandas Sphinx==1.2.3 sphinx_rtd_theme wheel twine influxdb-2.12.0/test-requirements.txt0000644000175000017500000000004012652700251020432 0ustar reazemreazem00000000000000nose nose-cov mock requests-mockinfluxdb-2.12.0/setup.py0000755000175000017500000000304712652700251015720 0ustar reazemreazem00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- try: import distribute_setup distribute_setup.use_setuptools() except: pass try: from setuptools import setup, find_packages except ImportError: from distutils.core import setup import os import re with open(os.path.join(os.path.dirname(__file__), 'influxdb', '__init__.py')) as f: version = re.search("__version__ = '([^']+)'", f.read()).group(1) with open('requirements.txt', 'r') as f: requires = [x.strip() for x in f if x.strip()] with open('test-requirements.txt', 'r') as f: test_requires = [x.strip() for x in f if x.strip()] with open('README.rst', 'r') as f: readme = f.read() setup( name='influxdb', version=version, description="InfluxDB client", long_description=readme, url='https://github.com/influxdb/influxdb-python', license='MIT License', packages=find_packages(exclude=['tests']), test_suite='tests', tests_require=test_requires, install_requires=requires, extras_require={'test': test_requires}, classifiers=( 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Topic :: Software Development :: Libraries', 'Topic :: Software Development :: Libraries :: Python Modules', ), ) 
influxdb-2.12.0/README.rst0000644000175000017500000001061212652700251015666 0ustar reazemreazem00000000000000 InfluxDB-Python is a client for interacting with InfluxDB_. .. image:: https://travis-ci.org/influxdata/influxdb-python.svg?branch=master :target: https://travis-ci.org/influxdata/influxdb-python .. image:: https://readthedocs.org/projects/influxdb-python/badge/?version=latest&style :target: http://influxdb-python.readthedocs.org/ :alt: Documentation Status .. image:: https://img.shields.io/coveralls/influxdb/influxdb-python.svg :target: https://coveralls.io/r/influxdb/influxdb-python :alt: Coverage .. _readme-about: InfluxDB is an open-source distributed time series database, find more about InfluxDB_ at http://influxdata.com/ .. _installation: InfluxDB v0.8.X users ===================== InfluxDB 0.9 was released and it is the new recommended version. However, InfluxDB 0.8.x users may still use the legacy client by using ``from influxdb.influxdb08 import InfluxDBClient`` instead. Installation ============ Install, upgrade and uninstall InfluxDB-Python with these commands:: $ pip install influxdb $ pip install --upgrade influxdb $ pip uninstall influxdb On Debian/Ubuntu, you can install it with this command:: $ sudo apt-get install python-influxdb Dependencies ============ The InfluxDB-Python distribution is supported and tested on Python 2.6, 2.7, 3.2, 3.3, 3.4, PyPy and PyPy3. **Note:** Python 3.2 is currently untested. See ``.travis.yml``. 
Main dependency is: - Requests: HTTP library for human beings (http://docs.python-requests.org/) Additional dependencies are: - pandas: for writing from and reading to DataFrames (http://pandas.pydata.org/) - Sphinx: Tool to create and manage the documentation (http://sphinx-doc.org/) - Nose: to auto-discover tests (http://nose.readthedocs.org/en/latest/) - Mock: to mock tests (https://pypi.python.org/pypi/mock) Documentation ============= InfluxDB-Python documentation is available at http://influxdb-python.readthedocs.org You will need Sphinx_ installed to generate the documentation. The documentation can be generated by running:: $ tox -e docs Generated documentation can be found in the *docs/build/html/* directory. Examples ======== Here's a basic example (for more see the examples directory):: $ python >>> from influxdb import InfluxDBClient >>> json_body = [ { "measurement": "cpu_load_short", "tags": { "host": "server01", "region": "us-west" }, "time": "2009-11-10T23:00:00Z", "fields": { "value": 0.64 } } ] >>> client = InfluxDBClient('localhost', 8086, 'root', 'root', 'example') >>> client.create_database('example') >>> client.write_points(json_body) >>> result = client.query('select value from cpu_load_short;') >>> print("Result: {0}".format(result)) If you want to connect to a cluster, you could initialize a ``InfluxDBClusterClient``:: $ python >>> from influxdb import InfluxDBClusterClient >>> cc = InfluxDBClusterClient(hosts = [('192.168.0.1', 8086), ('192.168.0.2', 8086), ('192.168.0.3', 8086)], username='root', password='root', database='example') ``InfluxDBClusterClient`` has the same methods as ``InfluxDBClient``, it basically is a proxy to multiple InfluxDBClients. 
Testing ======= Make sure you have tox by running the following:: $ pip install tox To test influxdb-python with multiple version of Python, you can use Tox_:: $ tox Support ======= For issues with, questions about, or feedback for InfluxDB_, please look into our community page: http://influxdb.com/community/. Development =========== All development is done on Github_. Use Issues_ to report problems or submit contributions. .. _Github: https://github.com/influxdb/influxdb-python/ .. _Issues: https://github.com/influxdb/influxdb-python/issues TODO ==== The TODO/Roadmap can be found in Github bug tracker: https://github.com/influxdata/influxdb-python/issues Source code =========== The source code is currently available on Github: https://github.com/influxdata/influxdb-python .. _InfluxDB: https://influxdata.com/time-series-platform/influxdb/ .. _Sphinx: http://sphinx.pocoo.org/ .. _Tox: https://tox.readthedocs.org influxdb-2.12.0/requirements.txt0000644000175000017500000000006712652700251017466 0ustar reazemreazem00000000000000python-dateutil>=2.0.0 pytz requests>=1.0.3 six>=1.9.0 influxdb-2.12.0/LICENSE0000644000175000017500000000206312652700251015205 0ustar reazemreazem00000000000000The MIT License (MIT) Copyright (c) 2013 InfluxDB Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. influxdb-2.12.0/PKG-INFO0000644000175000017500000001463312652700261015304 0ustar reazemreazem00000000000000Metadata-Version: 1.1 Name: influxdb Version: 2.12.0 Summary: InfluxDB client Home-page: https://github.com/influxdb/influxdb-python Author: UNKNOWN Author-email: UNKNOWN License: MIT License Description: InfluxDB-Python is a client for interacting with InfluxDB_. .. image:: https://travis-ci.org/influxdata/influxdb-python.svg?branch=master :target: https://travis-ci.org/influxdata/influxdb-python .. image:: https://readthedocs.org/projects/influxdb-python/badge/?version=latest&style :target: http://influxdb-python.readthedocs.org/ :alt: Documentation Status .. image:: https://img.shields.io/coveralls/influxdb/influxdb-python.svg :target: https://coveralls.io/r/influxdb/influxdb-python :alt: Coverage .. _readme-about: InfluxDB is an open-source distributed time series database, find more about InfluxDB_ at http://influxdata.com/ .. _installation: InfluxDB v0.8.X users ===================== InfluxDB 0.9 was released and it is the new recommended version. However, InfluxDB 0.8.x users may still use the legacy client by using ``from influxdb.influxdb08 import InfluxDBClient`` instead. 
Installation ============ Install, upgrade and uninstall InfluxDB-Python with these commands:: $ pip install influxdb $ pip install --upgrade influxdb $ pip uninstall influxdb On Debian/Ubuntu, you can install it with this command:: $ sudo apt-get install python-influxdb Dependencies ============ The InfluxDB-Python distribution is supported and tested on Python 2.6, 2.7, 3.2, 3.3, 3.4, PyPy and PyPy3. **Note:** Python 3.2 is currently untested. See ``.travis.yml``. Main dependency is: - Requests: HTTP library for human beings (http://docs.python-requests.org/) Additional dependencies are: - pandas: for writing from and reading to DataFrames (http://pandas.pydata.org/) - Sphinx: Tool to create and manage the documentation (http://sphinx-doc.org/) - Nose: to auto-discover tests (http://nose.readthedocs.org/en/latest/) - Mock: to mock tests (https://pypi.python.org/pypi/mock) Documentation ============= InfluxDB-Python documentation is available at http://influxdb-python.readthedocs.org You will need Sphinx_ installed to generate the documentation. The documentation can be generated by running:: $ tox -e docs Generated documentation can be found in the *docs/build/html/* directory. 
Examples ======== Here's a basic example (for more see the examples directory):: $ python >>> from influxdb import InfluxDBClient >>> json_body = [ { "measurement": "cpu_load_short", "tags": { "host": "server01", "region": "us-west" }, "time": "2009-11-10T23:00:00Z", "fields": { "value": 0.64 } } ] >>> client = InfluxDBClient('localhost', 8086, 'root', 'root', 'example') >>> client.create_database('example') >>> client.write_points(json_body) >>> result = client.query('select value from cpu_load_short;') >>> print("Result: {0}".format(result)) If you want to connect to a cluster, you could initialize a ``InfluxDBClusterClient``:: $ python >>> from influxdb import InfluxDBClusterClient >>> cc = InfluxDBClusterClient(hosts = [('192.168.0.1', 8086), ('192.168.0.2', 8086), ('192.168.0.3', 8086)], username='root', password='root', database='example') ``InfluxDBClusterClient`` has the same methods as ``InfluxDBClient``, it basically is a proxy to multiple InfluxDBClients. Testing ======= Make sure you have tox by running the following:: $ pip install tox To test influxdb-python with multiple version of Python, you can use Tox_:: $ tox Support ======= For issues with, questions about, or feedback for InfluxDB_, please look into our community page: http://influxdb.com/community/. Development =========== All development is done on Github_. Use Issues_ to report problems or submit contributions. .. _Github: https://github.com/influxdb/influxdb-python/ .. _Issues: https://github.com/influxdb/influxdb-python/issues TODO ==== The TODO/Roadmap can be found in Github bug tracker: https://github.com/influxdata/influxdb-python/issues Source code =========== The source code is currently available on Github: https://github.com/influxdata/influxdb-python .. _InfluxDB: https://influxdata.com/time-series-platform/influxdb/ .. _Sphinx: http://sphinx.pocoo.org/ .. 
_Tox: https://tox.readthedocs.org Platform: UNKNOWN Classifier: Development Status :: 3 - Alpha Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: MIT License Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.3 Classifier: Topic :: Software Development :: Libraries Classifier: Topic :: Software Development :: Libraries :: Python Modules influxdb-2.12.0/MANIFEST.in0000644000175000017500000000014412652700251015734 0ustar reazemreazem00000000000000include requirements.txt include test-requirements.txt include dev-requirements.txt include LICENSE influxdb-2.12.0/influxdb.egg-info/0000755000175000017500000000000012652700261017505 5ustar reazemreazem00000000000000influxdb-2.12.0/influxdb.egg-info/requires.txt0000644000175000017500000000014012652700261022100 0ustar reazemreazem00000000000000python-dateutil>=2.0.0 pytz requests>=1.0.3 six>=1.9.0 [test] nose nose-cov mock requests-mock influxdb-2.12.0/influxdb.egg-info/dependency_links.txt0000644000175000017500000000000112652700261023553 0ustar reazemreazem00000000000000 influxdb-2.12.0/influxdb.egg-info/PKG-INFO0000644000175000017500000001463312652700261020611 0ustar reazemreazem00000000000000Metadata-Version: 1.1 Name: influxdb Version: 2.12.0 Summary: InfluxDB client Home-page: https://github.com/influxdb/influxdb-python Author: UNKNOWN Author-email: UNKNOWN License: MIT License Description: InfluxDB-Python is a client for interacting with InfluxDB_. .. image:: https://travis-ci.org/influxdata/influxdb-python.svg?branch=master :target: https://travis-ci.org/influxdata/influxdb-python .. image:: https://readthedocs.org/projects/influxdb-python/badge/?version=latest&style :target: http://influxdb-python.readthedocs.org/ :alt: Documentation Status .. 
image:: https://img.shields.io/coveralls/influxdb/influxdb-python.svg :target: https://coveralls.io/r/influxdb/influxdb-python :alt: Coverage .. _readme-about: InfluxDB is an open-source distributed time series database, find more about InfluxDB_ at http://influxdata.com/ .. _installation: InfluxDB v0.8.X users ===================== InfluxDB 0.9 was released and it is the new recommended version. However, InfluxDB 0.8.x users may still use the legacy client by using ``from influxdb.influxdb08 import InfluxDBClient`` instead. Installation ============ Install, upgrade and uninstall InfluxDB-Python with these commands:: $ pip install influxdb $ pip install --upgrade influxdb $ pip uninstall influxdb On Debian/Ubuntu, you can install it with this command:: $ sudo apt-get install python-influxdb Dependencies ============ The InfluxDB-Python distribution is supported and tested on Python 2.6, 2.7, 3.2, 3.3, 3.4, PyPy and PyPy3. **Note:** Python 3.2 is currently untested. See ``.travis.yml``. Main dependency is: - Requests: HTTP library for human beings (http://docs.python-requests.org/) Additional dependencies are: - pandas: for writing from and reading to DataFrames (http://pandas.pydata.org/) - Sphinx: Tool to create and manage the documentation (http://sphinx-doc.org/) - Nose: to auto-discover tests (http://nose.readthedocs.org/en/latest/) - Mock: to mock tests (https://pypi.python.org/pypi/mock) Documentation ============= InfluxDB-Python documentation is available at http://influxdb-python.readthedocs.org You will need Sphinx_ installed to generate the documentation. The documentation can be generated by running:: $ tox -e docs Generated documentation can be found in the *docs/build/html/* directory. 
Examples ======== Here's a basic example (for more see the examples directory):: $ python >>> from influxdb import InfluxDBClient >>> json_body = [ { "measurement": "cpu_load_short", "tags": { "host": "server01", "region": "us-west" }, "time": "2009-11-10T23:00:00Z", "fields": { "value": 0.64 } } ] >>> client = InfluxDBClient('localhost', 8086, 'root', 'root', 'example') >>> client.create_database('example') >>> client.write_points(json_body) >>> result = client.query('select value from cpu_load_short;') >>> print("Result: {0}".format(result)) If you want to connect to a cluster, you could initialize a ``InfluxDBClusterClient``:: $ python >>> from influxdb import InfluxDBClusterClient >>> cc = InfluxDBClusterClient(hosts = [('192.168.0.1', 8086), ('192.168.0.2', 8086), ('192.168.0.3', 8086)], username='root', password='root', database='example') ``InfluxDBClusterClient`` has the same methods as ``InfluxDBClient``, it basically is a proxy to multiple InfluxDBClients. Testing ======= Make sure you have tox by running the following:: $ pip install tox To test influxdb-python with multiple version of Python, you can use Tox_:: $ tox Support ======= For issues with, questions about, or feedback for InfluxDB_, please look into our community page: http://influxdb.com/community/. Development =========== All development is done on Github_. Use Issues_ to report problems or submit contributions. .. _Github: https://github.com/influxdb/influxdb-python/ .. _Issues: https://github.com/influxdb/influxdb-python/issues TODO ==== The TODO/Roadmap can be found in Github bug tracker: https://github.com/influxdata/influxdb-python/issues Source code =========== The source code is currently available on Github: https://github.com/influxdata/influxdb-python .. _InfluxDB: https://influxdata.com/time-series-platform/influxdb/ .. _Sphinx: http://sphinx.pocoo.org/ .. 
_Tox: https://tox.readthedocs.org Platform: UNKNOWN Classifier: Development Status :: 3 - Alpha Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: MIT License Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.3 Classifier: Topic :: Software Development :: Libraries Classifier: Topic :: Software Development :: Libraries :: Python Modules influxdb-2.12.0/influxdb.egg-info/SOURCES.txt0000644000175000017500000000235112652700261021372 0ustar reazemreazem00000000000000LICENSE MANIFEST.in README.rst dev-requirements.txt requirements.txt setup.cfg setup.py test-requirements.txt influxdb/__init__.py influxdb/_dataframe_client.py influxdb/chunked_json.py influxdb/client.py influxdb/dataframe_client.py influxdb/exceptions.py influxdb/helper.py influxdb/line_protocol.py influxdb/resultset.py influxdb.egg-info/PKG-INFO influxdb.egg-info/SOURCES.txt influxdb.egg-info/dependency_links.txt influxdb.egg-info/requires.txt influxdb.egg-info/top_level.txt influxdb/influxdb08/__init__.py influxdb/influxdb08/chunked_json.py influxdb/influxdb08/client.py influxdb/influxdb08/dataframe_client.py influxdb/influxdb08/helper.py influxdb/tests/__init__.py influxdb/tests/chunked_json_test.py influxdb/tests/client_test.py influxdb/tests/dataframe_client_test.py influxdb/tests/helper_test.py influxdb/tests/misc.py influxdb/tests/resultset_test.py influxdb/tests/test_line_protocol.py influxdb/tests/influxdb08/__init__.py influxdb/tests/influxdb08/client_test.py influxdb/tests/influxdb08/dataframe_client_test.py influxdb/tests/influxdb08/helper_test.py influxdb/tests/server_tests/__init__.py influxdb/tests/server_tests/base.py influxdb/tests/server_tests/client_test_with_server.py 
influxdb/tests/server_tests/influxdb_instance.pyinfluxdb-2.12.0/influxdb.egg-info/top_level.txt0000644000175000017500000000001112652700261022227 0ustar reazemreazem00000000000000influxdb influxdb-2.12.0/influxdb/0000755000175000017500000000000012652700261016013 5ustar reazemreazem00000000000000influxdb-2.12.0/influxdb/exceptions.py0000644000175000017500000000124212652700251020544 0ustar reazemreazem00000000000000class InfluxDBClientError(Exception): """Raised when an error occurs in the request.""" def __init__(self, content, code=None): if isinstance(content, type(b'')): content = content.decode('UTF-8', 'replace') if code is not None: message = "%s: %s" % (code, content) else: message = content super(InfluxDBClientError, self).__init__( message ) self.content = content self.code = code class InfluxDBServerError(Exception): """Raised when a server error occurs.""" def __init__(self, content): super(InfluxDBServerError, self).__init__(content) influxdb-2.12.0/influxdb/__init__.py0000644000175000017500000000047312652700251020127 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- from .client import InfluxDBClient from .client import InfluxDBClusterClient from .dataframe_client import DataFrameClient from .helper import SeriesHelper __all__ = [ 'InfluxDBClient', 'InfluxDBClusterClient', 'DataFrameClient', 'SeriesHelper', ] __version__ = '2.12.0' influxdb-2.12.0/influxdb/line_protocol.py0000644000175000017500000000752212652700251021242 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- from __future__ import unicode_literals from calendar import timegm from copy import copy from datetime import datetime from numbers import Integral from dateutil.parser import parse from six import binary_type, text_type, integer_types def _convert_timestamp(timestamp, precision=None): if isinstance(timestamp, Integral): return timestamp # assume precision is correct if timestamp is int if isinstance(_get_unicode(timestamp), text_type): timestamp = parse(timestamp) if 
isinstance(timestamp, datetime): ns = ( timegm(timestamp.utctimetuple()) * 1e9 + timestamp.microsecond * 1e3 ) if precision is None or precision == 'n': return ns elif precision == 'u': return ns / 1e3 elif precision == 'ms': return ns / 1e6 elif precision == 's': return ns / 1e9 elif precision == 'm': return ns / 1e9 / 60 elif precision == 'h': return ns / 1e9 / 3600 raise ValueError(timestamp) def _escape_tag(tag): tag = _get_unicode(tag, force=True) return tag.replace( "\\", "\\\\" ).replace( " ", "\\ " ).replace( ",", "\\," ).replace( "=", "\\=" ) def _escape_value(value): value = _get_unicode(value) if isinstance(value, text_type) and value != '': return "\"{0}\"".format( value.replace( "\"", "\\\"" ).replace( "\n", "\\n" ) ) elif isinstance(value, integer_types) and not isinstance(value, bool): return str(value) + 'i' else: return str(value) def _get_unicode(data, force=False): """ Try to return a text aka unicode object from the given data. """ if isinstance(data, binary_type): return data.decode('utf-8') elif data is None: return '' elif force: return str(data) else: return data def make_lines(data, precision=None): """ Extracts the points from the given dict and returns a Unicode string matching the line protocol introduced in InfluxDB 0.9.0. 
""" lines = [] static_tags = data.get('tags', None) for point in data['points']: elements = [] # add measurement name measurement = _escape_tag(_get_unicode( point.get('measurement', data.get('measurement')) )) key_values = [measurement] # add tags if static_tags is None: tags = point.get('tags', {}) else: tags = copy(static_tags) tags.update(point.get('tags', {})) # tags should be sorted client-side to take load off server for tag_key in sorted(tags.keys()): key = _escape_tag(tag_key) value = _escape_tag(tags[tag_key]) if key != '' and value != '': key_values.append("{key}={value}".format(key=key, value=value)) key_values = ','.join(key_values) elements.append(key_values) # add fields field_values = [] for field_key in sorted(point['fields'].keys()): key = _escape_tag(field_key) value = _escape_value(point['fields'][field_key]) if key != '' and value != '': field_values.append("{key}={value}".format( key=key, value=value )) field_values = ','.join(field_values) elements.append(field_values) # add timestamp if 'time' in point: timestamp = _get_unicode(str(int( _convert_timestamp(point['time'], precision) ))) elements.append(timestamp) line = ' '.join(elements) lines.append(line) lines = '\n'.join(lines) return lines + '\n' influxdb-2.12.0/influxdb/resultset.py0000644000175000017500000001376212652700251020427 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- import warnings from influxdb.exceptions import InfluxDBClientError _sentinel = object() class ResultSet(object): """A wrapper around a single InfluxDB query result""" def __init__(self, series, raise_errors=True): self._raw = series self._error = self.raw.get('error', None) if self.error is not None and raise_errors is True: raise InfluxDBClientError(self.error) @property def raw(self): """Raw JSON from InfluxDB""" return self._raw @raw.setter def raw(self, value): self._raw = value @property def error(self): """Error returned by InfluxDB""" return self._error def __getitem__(self, key): """ :param key: 
Either a serie name, or a tags_dict, or a 2-tuple(serie_name, tags_dict). If the serie name is None (or not given) then any serie matching the eventual given tags will be given its points one after the other. To get the points of every serie in this resultset then you have to provide None as key. :return: A generator yielding `Point`s matching the given key. NB: The order in which the points are yielded is actually undefined but it might change.. """ warnings.warn( ("ResultSet's ``__getitem__`` method will be deprecated. Use" "``get_points`` instead."), DeprecationWarning ) if isinstance(key, tuple): if 2 != len(key): raise TypeError('only 2-tuples allowed') name = key[0] tags = key[1] if not isinstance(tags, dict) and tags is not None: raise TypeError('tags should be a dict') elif isinstance(key, dict): name = None tags = key else: name = key tags = None return self.get_points(name, tags) def get_points(self, measurement=None, tags=None): """ Returns a generator for all the points that match the given filters. :param measurement: The measurement name :type measurement: str :param tags: Tags to look for :type tags: dict :return: Points generator """ # Raise error if measurement is not str or bytes if not isinstance(measurement, (bytes, type(b''.decode()), type(None))): raise TypeError('measurement must be an str or None') for serie in self._get_series(): serie_name = serie.get('measurement', serie.get('name', 'results')) if serie_name is None: # this is a "system" query or a query which # doesn't return a name attribute. # like 'show retention policies' .. 
if tags is None: for item in self._get_points_for_serie(serie): yield item elif measurement in (None, serie_name): # by default if no tags was provided then # we will matches every returned serie serie_tags = serie.get('tags', {}) if tags is None or self._tag_matches(serie_tags, tags): for item in self._get_points_for_serie(serie): yield item def __repr__(self): items = [] for item in self.items(): items.append("'%s': %s" % (item[0], list(item[1]))) return "ResultSet({%s})" % ", ".join(items) def __iter__(self): """ Iterating a ResultSet will yield one dict instance per serie result. """ for key in self.keys(): yield list(self.__getitem__(key)) def _tag_matches(self, tags, filter): """Checks if all key/values in filter match in tags""" for tag_name, tag_value in filter.items(): # using _sentinel as I'm not sure that "None" # could be used, because it could be a valid # serie_tags value : when a serie has no such tag # then I think it's set to /null/None/.. TBC.. serie_tag_value = tags.get(tag_name, _sentinel) if serie_tag_value != tag_value: return False return True def _get_series(self): """Returns all series""" return self.raw.get('series', []) def __len__(self): return len(self.keys()) def keys(self): """ :return: List of keys. 
Keys are tuples (serie_name, tags) """ keys = [] for serie in self._get_series(): keys.append( (serie.get('measurement', serie.get('name', 'results')), serie.get('tags', None)) ) return keys def items(self): """ :return: List of tuples, (key, generator) """ items = [] for serie in self._get_series(): serie_key = (serie.get('measurement', serie.get('name', 'results')), serie.get('tags', None)) items.append( (serie_key, self._get_points_for_serie(serie)) ) return items def _get_points_for_serie(self, serie): """ Return generator of dict from columns and values of a serie :param serie: One serie :return: Generator of dicts """ for point in serie.get('values', []): yield self.point_from_cols_vals( serie['columns'], point ) @staticmethod def point_from_cols_vals(cols, vals): """ Creates a dict from columns and values lists :param cols: List of columns :param vals: List of values :return: Dict where keys are columns. """ point = {} for col_index, col_name in enumerate(cols): point[col_name] = vals[col_index] return point influxdb-2.12.0/influxdb/_dataframe_client.py0000644000175000017500000001333212652700251022007 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- """ DataFrame client for InfluxDB """ import math import pandas as pd from .client import InfluxDBClient def _pandas_time_unit(time_precision): unit = time_precision if time_precision == 'm': unit = 'ms' elif time_precision == 'u': unit = 'us' elif time_precision == 'n': unit = 'ns' assert unit in ('s', 'ms', 'us', 'ns') return unit class DataFrameClient(InfluxDBClient): """ The ``DataFrameClient`` object holds information necessary to connect to InfluxDB. Requests can be made to InfluxDB directly through the client. The client reads and writes from pandas DataFrames. """ EPOCH = pd.Timestamp('1970-01-01 00:00:00.000+00:00') def write_points(self, dataframe, measurement, tags=None, time_precision=None, database=None, retention_policy=None, batch_size=None): """ Write to multiple time series names. 
:param dataframe: data points in a DataFrame :param measurement: name of measurement :param tags: dictionary of tags, with string key-values :param time_precision: [Optional, default None] Either 's', 'ms', 'u' or 'n'. :param batch_size: [Optional] Value to write the points in batches instead of all at one time. Useful for when doing data dumps from one database to another or when doing a massive write operation :type batch_size: int """ if batch_size: number_batches = int(math.ceil( len(dataframe) / float(batch_size))) for batch in range(number_batches): start_index = batch * batch_size end_index = (batch + 1) * batch_size points = self._convert_dataframe_to_json( dataframe.ix[start_index:end_index].copy(), measurement, tags, time_precision ) super(DataFrameClient, self).write_points( points, time_precision, database, retention_policy) return True else: points = self._convert_dataframe_to_json( dataframe, measurement, tags, time_precision ) super(DataFrameClient, self).write_points( points, time_precision, database, retention_policy) return True def query(self, query, chunked=False, database=None): """ Quering data into a DataFrame. :param chunked: [Optional, default=False] True if the data shall be retrieved in chunks, False otherwise. 
""" results = super(DataFrameClient, self).query(query, database=database) if query.upper().startswith("SELECT"): if len(results) > 0: return self._to_dataframe(results) else: return {} else: return results def get_list_series(self, database=None): """ Get the list of series, in DataFrame """ results = super(DataFrameClient, self)\ .query("SHOW SERIES", database=database) if len(results): return dict( (key[0], pd.DataFrame(data)) for key, data in results.items() ) else: return {} def _to_dataframe(self, rs): result = {} for key, data in rs.items(): name, tags = key if tags is None: key = name else: key = (name, tuple(sorted(tags.items()))) df = pd.DataFrame(data) df.time = pd.to_datetime(df.time) df.set_index('time', inplace=True) df.index = df.index.tz_localize('UTC') df.index.name = None result[key] = df return result def _convert_dataframe_to_json(self, dataframe, measurement, tags=None, time_precision=None): if not isinstance(dataframe, pd.DataFrame): raise TypeError('Must be DataFrame, but type was: {0}.' 
.format(type(dataframe))) if not (isinstance(dataframe.index, pd.tseries.period.PeriodIndex) or isinstance(dataframe.index, pd.tseries.index.DatetimeIndex)): raise TypeError('Must be DataFrame with DatetimeIndex or \ PeriodIndex.') dataframe.index = dataframe.index.to_datetime() if dataframe.index.tzinfo is None: dataframe.index = dataframe.index.tz_localize('UTC') # Convert column to strings dataframe.columns = dataframe.columns.astype('str') # Convert dtype for json serialization dataframe = dataframe.astype('object') precision_factor = { "n": 1, "u": 1e3, "ms": 1e6, "s": 1e9, "m": 1e9 * 60, "h": 1e9 * 3600, }.get(time_precision, 1) points = [ {'measurement': measurement, 'tags': tags if tags else {}, 'fields': rec, 'time': int(ts.value / precision_factor) } for ts, rec in zip(dataframe.index, dataframe.to_dict('record'))] return points def _datetime_to_epoch(self, datetime, time_precision='s'): seconds = (datetime - self.EPOCH).total_seconds() if time_precision == 'h': return seconds / 3600 elif time_precision == 'm': return seconds / 60 elif time_precision == 's': return seconds elif time_precision == 'ms': return seconds * 1e3 elif time_precision == 'u': return seconds * 1e6 elif time_precision == 'n': return seconds * 1e9 influxdb-2.12.0/influxdb/tests/0000755000175000017500000000000012652700261017155 5ustar reazemreazem00000000000000influxdb-2.12.0/influxdb/tests/__init__.py0000644000175000017500000000072612652700251021272 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- import sys import os if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest using_pypy = hasattr(sys, "pypy_version_info") skipIfPYpy = unittest.skipIf(using_pypy, "Skipping this test on pypy.") _skip_server_tests = os.environ.get( 'INFLUXDB_PYTHON_SKIP_SERVER_TESTS', None) == 'True' skipServerTests = unittest.skipIf(_skip_server_tests, "Skipping server tests...") influxdb-2.12.0/influxdb/tests/misc.py0000644000175000017500000000233612652700251020465 0ustar 
reazemreazem00000000000000# -*- coding: utf-8 -*- import socket def get_free_ports(num_ports, ip='127.0.0.1'): """Get `num_ports` free/available ports on the interface linked to the `ip´ :param int num_ports: The number of free ports to get :param str ip: The ip on which the ports have to be taken :return: a set of ports number """ sock_ports = [] ports = set() try: for _ in range(num_ports): sock = socket.socket() cur = [sock, -1] # append the socket directly, # so that it'll be also closed (no leaked resource) # in the finally here after. sock_ports.append(cur) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.bind((ip, 0)) cur[1] = sock.getsockname()[1] finally: for sock, port in sock_ports: sock.close() ports.add(port) assert num_ports == len(ports) return ports def is_port_open(port, ip='127.0.0.1'): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: result = sock.connect_ex((ip, port)) if not result: sock.shutdown(socket.SHUT_RDWR) return result == 0 finally: sock.close() influxdb-2.12.0/influxdb/tests/server_tests/0000755000175000017500000000000012652700261021705 5ustar reazemreazem00000000000000influxdb-2.12.0/influxdb/tests/server_tests/__init__.py0000644000175000017500000000000012652700251024003 0ustar reazemreazem00000000000000influxdb-2.12.0/influxdb/tests/server_tests/influxdb_instance.py0000644000175000017500000001522212652700251025757 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- from __future__ import print_function import datetime import os import tempfile import distutils import time import shutil import subprocess import sys if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest from influxdb.tests.misc import is_port_open, get_free_ports # hack in check_output if it's not defined, like for python 2.6 if "check_output" not in dir(subprocess): def f(*popenargs, **kwargs): if 'stdout' in kwargs: raise ValueError( 'stdout argument not allowed, it will be overridden.' 
) process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs) output, unused_err = process.communicate() retcode = process.poll() if retcode: cmd = kwargs.get("args") if cmd is None: cmd = popenargs[0] raise subprocess.CalledProcessError(retcode, cmd) return output subprocess.check_output = f class InfluxDbInstance(object): """ A class to launch of fresh influxdb server instance in a temporary place, using a config file template. """ def __init__(self, conf_template, udp_enabled=False): if os.environ.get("INFLUXDB_PYTHON_SKIP_SERVER_TESTS", None) == 'True': raise unittest.SkipTest( "Skipping server test (INFLUXDB_PYTHON_SKIP_SERVER_TESTS)" ) self.influxd_path = self.find_influxd_path() errors = 0 while True: try: self._start_server(conf_template, udp_enabled) break # Happens when the ports are already in use. except RuntimeError as e: errors += 1 if errors > 2: raise e def _start_server(self, conf_template, udp_enabled): # create a temporary dir to store all needed files # for the influxdb server instance : self.temp_dir_base = tempfile.mkdtemp() # "temp_dir_base" will be used for conf file and logs, # while "temp_dir_influxdb" is for the databases files/dirs : tempdir = self.temp_dir_influxdb = tempfile.mkdtemp( dir=self.temp_dir_base) # find a couple free ports : free_ports = get_free_ports(4) ports = {} for service in 'http', 'admin', 'meta', 'udp': ports[service + '_port'] = free_ports.pop() if not udp_enabled: ports['udp_port'] = -1 conf_data = dict( meta_dir=os.path.join(tempdir, 'meta'), data_dir=os.path.join(tempdir, 'data'), wal_dir=os.path.join(tempdir, 'wal'), cluster_dir=os.path.join(tempdir, 'state'), handoff_dir=os.path.join(tempdir, 'handoff'), logs_file=os.path.join(self.temp_dir_base, 'logs.txt'), udp_enabled='true' if udp_enabled else 'false', ) conf_data.update(ports) self.__dict__.update(conf_data) conf_file = os.path.join(self.temp_dir_base, 'influxdb.conf') with open(conf_file, "w") as fh: with open(conf_template) as fh_template: 
fh.write(fh_template.read().format(**conf_data)) # now start the server instance: self.proc = subprocess.Popen( [self.influxd_path, '-config', conf_file], stdout=subprocess.PIPE, stderr=subprocess.PIPE ) print( "%s > Started influxdb bin in %r with ports %s and %s.." % ( datetime.datetime.now(), self.temp_dir_base, self.admin_port, self.http_port ) ) # wait for it to listen on the broker and admin ports: # usually a fresh instance is ready in less than 1 sec .. timeout = time.time() + 10 # so 10 secs should be enough, # otherwise either your system load is high, # or you run a 286 @ 1Mhz ? try: while time.time() < timeout: if (is_port_open(self.http_port) and is_port_open(self.admin_port)): # it's hard to check if a UDP port is open.. if udp_enabled: # so let's just sleep 0.5 sec in this case # to be sure that the server has open the port time.sleep(0.5) break time.sleep(0.5) if self.proc.poll() is not None: raise RuntimeError('influxdb prematurely exited') else: self.proc.terminate() self.proc.wait() raise RuntimeError('Timeout waiting for influxdb to listen' ' on its ports (%s)' % ports) except RuntimeError as err: data = self.get_logs_and_output() data['reason'] = str(err) data['now'] = datetime.datetime.now() raise RuntimeError("%(now)s > %(reason)s. 
RC=%(rc)s\n" "stdout=%(out)s\nstderr=%(err)s\nlogs=%(logs)r" % data) def find_influxd_path(self): influxdb_bin_path = os.environ.get( 'INFLUXDB_PYTHON_INFLUXD_PATH', None ) if influxdb_bin_path is None: influxdb_bin_path = distutils.spawn.find_executable('influxd') if not influxdb_bin_path: try: influxdb_bin_path = subprocess.check_output( ['which', 'influxdb'] ).strip() except subprocess.CalledProcessError: # fallback on : influxdb_bin_path = '/opt/influxdb/influxd' if not os.path.isfile(influxdb_bin_path): raise unittest.SkipTest("Could not find influxd binary") version = subprocess.check_output([influxdb_bin_path, 'version']) print("InfluxDB version: %s" % version, file=sys.stderr) return influxdb_bin_path def get_logs_and_output(self): proc = self.proc try: with open(self.logs_file) as fh: logs = fh.read() except IOError as err: logs = "Couldn't read logs: %s" % err return { 'rc': proc.returncode, 'out': proc.stdout.read(), 'err': proc.stderr.read(), 'logs': logs } def close(self, remove_tree=True): self.proc.terminate() self.proc.wait() if remove_tree: shutil.rmtree(self.temp_dir_base) influxdb-2.12.0/influxdb/tests/server_tests/base.py0000644000175000017500000000375712652700251023204 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- import sys from influxdb.tests import using_pypy from influxdb.tests.server_tests.influxdb_instance import InfluxDbInstance from influxdb.client import InfluxDBClient if not using_pypy: from influxdb.dataframe_client import DataFrameClient def _setup_influxdb_server(inst): inst.influxd_inst = InfluxDbInstance( inst.influxdb_template_conf, udp_enabled=getattr(inst, 'influxdb_udp_enabled', False), ) inst.cli = InfluxDBClient('localhost', inst.influxd_inst.http_port, 'root', '', database='db') if not using_pypy: inst.cliDF = DataFrameClient('localhost', inst.influxd_inst.http_port, 'root', '', database='db') def _teardown_influxdb_server(inst): remove_tree = sys.exc_info() == (None, None, None) 
inst.influxd_inst.close(remove_tree=remove_tree) class SingleTestCaseWithServerMixin(object): ''' A mixin for unittest.TestCase to start an influxdb server instance in a temporary directory **for each test function/case** ''' # 'influxdb_template_conf' attribute must be set # on the TestCase class or instance. setUp = _setup_influxdb_server tearDown = _teardown_influxdb_server class ManyTestCasesWithServerMixin(object): ''' Same than SingleTestCaseWithServerMixin but creates a single instance for the whole class. Also pre-creates a fresh database: 'db'. ''' # 'influxdb_template_conf' attribute must be set on the class itself ! @classmethod def setUpClass(cls): _setup_influxdb_server(cls) def setUp(self): self.cli.create_database('db') @classmethod def tearDownClass(cls): _teardown_influxdb_server(cls) def tearDown(self): self.cli.drop_database('db') influxdb-2.12.0/influxdb/tests/server_tests/client_test_with_server.py0000644000175000017500000005705412652700251027227 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- """ unit tests for checking the good/expected interaction between : + the python client.. (obviously) + and a *_real_* server instance running. This basically duplicates what's in client_test.py but without mocking around every call. 
""" from __future__ import print_function from functools import partial import os import time import sys if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest import warnings # By default, raise exceptions on warnings warnings.simplefilter('error', FutureWarning) from influxdb import InfluxDBClient from influxdb.exceptions import InfluxDBClientError from influxdb.tests import skipIfPYpy, using_pypy, skipServerTests from influxdb.tests.server_tests.base import ManyTestCasesWithServerMixin from influxdb.tests.server_tests.base import SingleTestCaseWithServerMixin if not using_pypy: import pandas as pd from pandas.util.testing import assert_frame_equal THIS_DIR = os.path.abspath(os.path.dirname(__file__)) def point(serie_name, timestamp=None, tags=None, **fields): res = {'measurement': serie_name} if timestamp: res['time'] = timestamp if tags: res['tags'] = tags res['fields'] = fields return res dummy_point = [ # some dummy points { "measurement": "cpu_load_short", "tags": { "host": "server01", "region": "us-west" }, "time": "2009-11-10T23:00:00Z", "fields": { "value": 0.64 } } ] dummy_points = [ # some dummy points dummy_point[0], { "measurement": "memory", "tags": { "host": "server01", "region": "us-west" }, "time": "2009-11-10T23:01:35Z", "fields": { "value": 33.0 } } ] if not using_pypy: dummy_pointDF = { "measurement": "cpu_load_short", "tags": {"host": "server01", "region": "us-west"}, "dataframe": pd.DataFrame( [[0.64]], columns=['value'], index=pd.to_datetime(["2009-11-10T23:00:00Z"])) } dummy_pointsDF = [{ "measurement": "cpu_load_short", "tags": {"host": "server01", "region": "us-west"}, "dataframe": pd.DataFrame( [[0.64]], columns=['value'], index=pd.to_datetime(["2009-11-10T23:00:00Z"])), }, { "measurement": "memory", "tags": {"host": "server01", "region": "us-west"}, "dataframe": pd.DataFrame( [[33]], columns=['value'], index=pd.to_datetime(["2009-11-10T23:01:35Z"]) ) }] dummy_point_without_timestamp = [ { "measurement": 
"cpu_load_short", "tags": { "host": "server02", "region": "us-west" }, "fields": { "value": 0.64 } } ] @skipServerTests class SimpleTests(SingleTestCaseWithServerMixin, unittest.TestCase): influxdb_template_conf = os.path.join(THIS_DIR, 'influxdb.conf.template') def test_fresh_server_no_db(self): self.assertEqual([], self.cli.get_list_database()) def test_create_database(self): self.assertIsNone(self.cli.create_database('new_db_1')) self.assertIsNone(self.cli.create_database('new_db_2')) self.assertEqual( self.cli.get_list_database(), [{'name': 'new_db_1'}, {'name': 'new_db_2'}] ) def test_create_database_twice_if_not_exist(self): self.assertIsNone(self.cli.create_database('new_db')) self.assertIsNone( self.cli.create_database('new_db', if_not_exists=True)) def test_create_database_twice_fails(self): self.assertIsNone(self.cli.create_database('new_db')) with self.assertRaises(InfluxDBClientError) as ctx: self.cli.create_database('new_db') self.assertEqual('database already exists', ctx.exception.content) def test_get_list_series_empty(self): rsp = self.cli.get_list_series() self.assertEqual([], rsp) @unittest.skip("Broken as of 0.9.0") def test_get_list_series_empty_DF(self): rsp = self.cliDF.get_list_series() self.assertEqual({}, rsp) def test_drop_database(self): self.test_create_database() self.assertIsNone(self.cli.drop_database('new_db_1')) self.assertEqual([{'name': 'new_db_2'}], self.cli.get_list_database()) def test_drop_database_fails(self): with self.assertRaises(InfluxDBClientError) as ctx: self.cli.drop_database('db') self.assertIn('database not found: db', ctx.exception.content) def test_query_fail(self): with self.assertRaises(InfluxDBClientError) as ctx: self.cli.query('select column_one from foo') self.assertIn('database not found: db', ctx.exception.content) def test_query_fail_ignore_errors(self): result = self.cli.query('select column_one from foo', raise_errors=False) self.assertEqual(result.error, 'database not found: db') def 
test_create_user(self): self.cli.create_user('test_user', 'secret_password') rsp = list(self.cli.query("SHOW USERS")['results']) self.assertIn({'user': 'test_user', 'admin': False}, rsp) def test_create_user_admin(self): self.cli.create_user('test_user', 'secret_password', True) rsp = list(self.cli.query("SHOW USERS")['results']) self.assertIn({'user': 'test_user', 'admin': True}, rsp) def test_create_user_blank_password(self): self.cli.create_user('test_user', '') rsp = list(self.cli.query("SHOW USERS")['results']) self.assertIn({'user': 'test_user', 'admin': False}, rsp) def test_get_list_users_empty(self): rsp = self.cli.get_list_users() self.assertEqual([], rsp) def test_get_list_users(self): self.cli.query("CREATE USER test WITH PASSWORD 'test'") rsp = self.cli.get_list_users() self.assertEqual( [{'user': 'test', 'admin': False}], rsp ) def test_create_user_blank_username(self): with self.assertRaises(InfluxDBClientError) as ctx: self.cli.create_user('', 'secret_password') self.assertEqual(400, ctx.exception.code) self.assertIn('{"error":"error parsing query: ' 'found WITH, expected identifier', ctx.exception.content) rsp = list(self.cli.query("SHOW USERS")['results']) self.assertEqual(rsp, []) def test_create_user_invalid_username(self): with self.assertRaises(InfluxDBClientError) as ctx: self.cli.create_user('very invalid', 'secret_password') self.assertEqual(400, ctx.exception.code) self.assertIn('{"error":"error parsing query: ' 'found invalid, expected WITH', ctx.exception.content) rsp = list(self.cli.query("SHOW USERS")['results']) self.assertEqual(rsp, []) def test_drop_user(self): self.cli.query("CREATE USER test WITH PASSWORD 'test'") self.cli.drop_user('test') users = list(self.cli.query("SHOW USERS")['results']) self.assertEqual(users, []) def test_drop_user_nonexisting(self): with self.assertRaises(InfluxDBClientError) as ctx: self.cli.drop_user('test') self.assertIn('user not found', ctx.exception.content) def test_drop_user_invalid(self): with 
self.assertRaises(InfluxDBClientError) as ctx: self.cli.drop_user('very invalid') self.assertEqual(400, ctx.exception.code) self.assertIn('{"error":"error parsing query: ' 'found invalid, expected', ctx.exception.content) @unittest.skip("Broken as of 0.9.0") def test_revoke_admin_privileges(self): self.cli.create_user('test', 'test', admin=True) self.assertEqual([{'user': 'test', 'admin': True}], self.cli.get_list_users()) self.cli.revoke_admin_privileges('test') self.assertEqual([{'user': 'test', 'admin': False}], self.cli.get_list_users()) def test_revoke_admin_privileges_invalid(self): with self.assertRaises(InfluxDBClientError) as ctx: self.cli.revoke_admin_privileges('') self.assertEqual(400, ctx.exception.code) self.assertIn('{"error":"error parsing query: ', ctx.exception.content) def test_grant_privilege(self): self.cli.create_user('test', 'test') self.cli.create_database('testdb') self.cli.grant_privilege('all', 'testdb', 'test') # TODO: when supported by InfluxDB, check if privileges are granted def test_grant_privilege_invalid(self): self.cli.create_user('test', 'test') self.cli.create_database('testdb') with self.assertRaises(InfluxDBClientError) as ctx: self.cli.grant_privilege('', 'testdb', 'test') self.assertEqual(400, ctx.exception.code) self.assertIn('{"error":"error parsing query: ', ctx.exception.content) def test_revoke_privilege(self): self.cli.create_user('test', 'test') self.cli.create_database('testdb') self.cli.revoke_privilege('all', 'testdb', 'test') # TODO: when supported by InfluxDB, check if privileges are revoked def test_revoke_privilege_invalid(self): self.cli.create_user('test', 'test') self.cli.create_database('testdb') with self.assertRaises(InfluxDBClientError) as ctx: self.cli.revoke_privilege('', 'testdb', 'test') self.assertEqual(400, ctx.exception.code) self.assertIn('{"error":"error parsing query: ', ctx.exception.content) @skipServerTests class CommonTests(ManyTestCasesWithServerMixin, unittest.TestCase): 
influxdb_template_conf = os.path.join(THIS_DIR, 'influxdb.conf.template') def test_write(self): self.assertIs(True, self.cli.write( {'points': dummy_point}, params={'db': 'db'}, )) def test_write_check_read(self): self.test_write() time.sleep(1) rsp = self.cli.query('SELECT * FROM cpu_load_short', database='db') self.assertListEqual([{'value': 0.64, 'time': '2009-11-10T23:00:00Z', "host": "server01", "region": "us-west"}], list(rsp.get_points())) def test_write_points(self): self.assertIs(True, self.cli.write_points(dummy_point)) @skipIfPYpy def test_write_points_DF(self): self.assertIs( True, self.cliDF.write_points( dummy_pointDF['dataframe'], dummy_pointDF['measurement'], dummy_pointDF['tags'] ) ) def test_write_points_check_read(self): self.test_write_points() time.sleep(1) # same as test_write_check_read() rsp = self.cli.query('SELECT * FROM cpu_load_short') self.assertEqual( list(rsp), [[{'value': 0.64, 'time': '2009-11-10T23:00:00Z', "host": "server01", "region": "us-west"}]] ) rsp2 = list(rsp.get_points()) self.assertEqual(len(rsp2), 1) pt = rsp2[0] self.assertEqual( pt, {'time': '2009-11-10T23:00:00Z', 'value': 0.64, "host": "server01", "region": "us-west"} ) @unittest.skip("Broken as of 0.9.0") def test_write_points_check_read_DF(self): self.test_write_points_DF() time.sleep(1) # same as test_write_check_read() rsp = self.cliDF.query('SELECT * FROM cpu_load_short') assert_frame_equal( rsp['cpu_load_short'], dummy_pointDF['dataframe'] ) # Query with Tags rsp = self.cliDF.query( "SELECT * FROM cpu_load_short GROUP BY *") assert_frame_equal( rsp[('cpu_load_short', (('host', 'server01'), ('region', 'us-west')))], dummy_pointDF['dataframe'] ) def test_write_multiple_points_different_series(self): self.assertIs(True, self.cli.write_points(dummy_points)) time.sleep(1) rsp = self.cli.query('SELECT * FROM cpu_load_short') lrsp = list(rsp) self.assertEqual( [[{'value': 0.64, 'time': '2009-11-10T23:00:00Z', "host": "server01", "region": "us-west"}]], lrsp ) rsp = 
list(self.cli.query('SELECT * FROM memory')) self.assertEqual( rsp, [[{'value': 33, 'time': '2009-11-10T23:01:35Z', "host": "server01", "region": "us-west"}]] ) @unittest.skip("Broken as of 0.9.0") def test_write_multiple_points_different_series_DF(self): for i in range(2): self.assertIs( True, self.cliDF.write_points( dummy_pointsDF[i]['dataframe'], dummy_pointsDF[i]['measurement'], dummy_pointsDF[i]['tags'])) time.sleep(1) rsp = self.cliDF.query('SELECT * FROM cpu_load_short') assert_frame_equal( rsp['cpu_load_short'], dummy_pointsDF[0]['dataframe'] ) rsp = self.cliDF.query('SELECT * FROM memory') assert_frame_equal( rsp['memory'], dummy_pointsDF[1]['dataframe'] ) def test_write_points_batch(self): dummy_points = [ {"measurement": "cpu_usage", "tags": {"unit": "percent"}, "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}}, {"measurement": "network", "tags": {"direction": "in"}, "time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}}, {"measurement": "network", "tags": {"direction": "out"}, "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}} ] self.cli.write_points(points=dummy_points, tags={"host": "server01", "region": "us-west"}, batch_size=2) time.sleep(5) net_in = self.cli.query("SELECT value FROM network " "WHERE direction='in'").raw net_out = self.cli.query("SELECT value FROM network " "WHERE direction='out'").raw cpu = self.cli.query("SELECT value FROM cpu_usage").raw self.assertIn(123, net_in['series'][0]['values'][0]) self.assertIn(12, net_out['series'][0]['values'][0]) self.assertIn(12.34, cpu['series'][0]['values'][0]) def test_query(self): self.assertIs(True, self.cli.write_points(dummy_point)) @unittest.skip('Not implemented for 0.9') def test_query_chunked(self): cli = InfluxDBClient(database='db') example_object = { 'points': [ [1415206250119, 40001, 667], [1415206244555, 30001, 7], [1415206228241, 20001, 788], [1415206212980, 10001, 555], [1415197271586, 10001, 23] ], 'name': 'foo', 'columns': [ 'time', 'sequence_number', 
'val' ] } del cli del example_object # TODO ? def test_get_list_series_and_delete(self): self.cli.write_points(dummy_point) rsp = self.cli.get_list_series() self.assertEqual( [ {'name': 'cpu_load_short', 'tags': [ {'host': 'server01', 'region': 'us-west', '_key': 'cpu_load_short,host=server01,region=us-west'}]} ], rsp ) def test_delete_series_invalid(self): with self.assertRaises(InfluxDBClientError): self.cli.delete_series() def test_delete_series(self): self.assertEqual(len(self.cli.get_list_series()), 0) self.cli.write_points(dummy_points) self.assertEqual(len(self.cli.get_list_series()), 2) self.cli.delete_series(measurement='cpu_load_short') self.assertEqual(len(self.cli.get_list_series()), 1) self.cli.delete_series(tags={'region': 'us-west'}) self.assertEqual(len(self.cli.get_list_series()), 0) @unittest.skip("Broken as of 0.9.0") def test_get_list_series_DF(self): self.cli.write_points(dummy_point) rsp = self.cliDF.get_list_series() expected = pd.DataFrame( [[1, 'server01', 'us-west']], columns=['_id', 'host', 'region']) assert_frame_equal(rsp['cpu_load_short'], expected) def test_default_retention_policy(self): rsp = self.cli.get_list_retention_policies() self.assertEqual( [ {'name': 'default', 'duration': '0', 'replicaN': 1, 'default': True} ], rsp ) def test_create_retention_policy_default(self): self.cli.create_retention_policy('somename', '1d', 1, default=True) self.cli.create_retention_policy('another', '2d', 1, default=False) rsp = self.cli.get_list_retention_policies() self.assertEqual( [ {'duration': '0', 'default': False, 'replicaN': 1, 'name': 'default'}, {'duration': '24h0m0s', 'default': True, 'replicaN': 1, 'name': 'somename'}, {'duration': '48h0m0s', 'default': False, 'replicaN': 1, 'name': 'another'} ], rsp ) def test_create_retention_policy(self): self.cli.create_retention_policy('somename', '1d', 1) rsp = self.cli.get_list_retention_policies() self.assertEqual( [{'duration': '0', 'default': True, 'replicaN': 1, 'name': 'default'}, 
{'duration': '24h0m0s', 'default': False, 'replicaN': 1, 'name': 'somename'}], rsp ) def test_alter_retention_policy(self): self.cli.create_retention_policy('somename', '1d', 1) # Test alter duration self.cli.alter_retention_policy('somename', 'db', duration='4d') rsp = self.cli.get_list_retention_policies() self.assertEqual( [{'duration': '0', 'default': True, 'replicaN': 1, 'name': 'default'}, {'duration': '96h0m0s', 'default': False, 'replicaN': 1, 'name': 'somename'}], rsp ) # Test alter replication self.cli.alter_retention_policy('somename', 'db', replication=4) rsp = self.cli.get_list_retention_policies() self.assertEqual( [{'duration': '0', 'default': True, 'replicaN': 1, 'name': 'default'}, {'duration': '96h0m0s', 'default': False, 'replicaN': 4, 'name': 'somename'}], rsp ) # Test alter default self.cli.alter_retention_policy('somename', 'db', default=True) rsp = self.cli.get_list_retention_policies() self.assertEqual( [{'duration': '0', 'default': False, 'replicaN': 1, 'name': 'default'}, {'duration': '96h0m0s', 'default': True, 'replicaN': 4, 'name': 'somename'}], rsp ) def test_alter_retention_policy_invalid(self): self.cli.create_retention_policy('somename', '1d', 1) with self.assertRaises(InfluxDBClientError) as ctx: self.cli.alter_retention_policy('somename', 'db') self.assertEqual(400, ctx.exception.code) self.assertIn('{"error":"error parsing query: ', ctx.exception.content) rsp = self.cli.get_list_retention_policies() self.assertEqual( [{'duration': '0', 'default': True, 'replicaN': 1, 'name': 'default'}, {'duration': '24h0m0s', 'default': False, 'replicaN': 1, 'name': 'somename'}], rsp ) def test_issue_143(self): pt = partial(point, 'a_serie_name', timestamp='2015-03-30T16:16:37Z') pts = [ pt(value=15), pt(tags={'tag_1': 'value1'}, value=5), pt(tags={'tag_1': 'value2'}, value=10), ] self.cli.write_points(pts) time.sleep(1) rsp = list(self.cli.query('SELECT * FROM a_serie_name GROUP BY tag_1')) self.assertEqual( [ [{'value': 15, 'time': 
'2015-03-30T16:16:37Z'}], [{'value': 5, 'time': '2015-03-30T16:16:37Z'}], [{'value': 10, 'time': '2015-03-30T16:16:37Z'}] ], rsp ) # a slightly more complex one with 2 tags values: pt = partial(point, 'serie2', timestamp='2015-03-30T16:16:37Z') pts = [ pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0), pt(tags={'tag1': 'value1', 'tag2': 'v2'}, value=5), pt(tags={'tag1': 'value2', 'tag2': 'v1'}, value=10), ] self.cli.write_points(pts) time.sleep(1) rsp = self.cli.query('SELECT * FROM serie2 GROUP BY tag1,tag2') self.assertEqual( [ [{'value': 0, 'time': '2015-03-30T16:16:37Z'}], [{'value': 5, 'time': '2015-03-30T16:16:37Z'}], [{'value': 10, 'time': '2015-03-30T16:16:37Z'}] ], list(rsp) ) all_tag2_equal_v1 = list(rsp[None, {'tag2': 'v1'}]) self.assertEqual( [{'value': 0, 'time': '2015-03-30T16:16:37Z'}, {'value': 10, 'time': '2015-03-30T16:16:37Z'}], all_tag2_equal_v1, ) def test_query_multiple_series(self): pt = partial(point, 'serie1', timestamp='2015-03-30T16:16:37Z') pts = [ pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0), ] self.cli.write_points(pts) pt = partial(point, 'serie2', timestamp='1970-03-30T16:16:37Z') pts = [ pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0, data1=33, data2="bla"), ] self.cli.write_points(pts) @skipServerTests class UdpTests(ManyTestCasesWithServerMixin, unittest.TestCase): influxdb_udp_enabled = True influxdb_template_conf = os.path.join(THIS_DIR, 'influxdb.conf.template') def test_write_points_udp(self): cli = InfluxDBClient( 'localhost', self.influxd_inst.http_port, 'root', '', database='db', use_udp=True, udp_port=self.influxd_inst.udp_port ) cli.write_points(dummy_point) # The points are not immediately available after write_points. # This is to be expected because we are using udp (no response !). # So we have to wait some time, time.sleep(3) # 3 sec seems to be a good choice. 
rsp = self.cli.query('SELECT * FROM cpu_load_short') self.assertEqual( # this is dummy_points : [{'value': 0.64, 'time': '2009-11-10T23:00:00Z', "host": "server01", "region": "us-west"}], list(rsp['cpu_load_short']) ) influxdb-2.12.0/influxdb/tests/chunked_json_test.py0000644000175000017500000000303512652700251023240 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- import sys from influxdb import chunked_json if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest class TestChunkJson(unittest.TestCase): @classmethod def setUpClass(cls): super(TestChunkJson, cls).setUpClass() def test_load(self): """ Tests reading a sequence of JSON values from a string """ example_response = \ '{"results": [{"series": [{"measurement": "sdfsdfsdf", ' \ '"columns": ["time", "value"], "values": ' \ '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": ' \ '[{"measurement": "cpu_load_short", "columns": ["time", "value"],'\ '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}' res = list(chunked_json.loads(example_response)) # import ipdb; ipdb.set_trace() # self.assertTrue(res) self.assertListEqual( [ { 'results': [ {'series': [{ 'values': [['2009-11-10T23:00:00Z', 0.64]], 'measurement': 'sdfsdfsdf', 'columns': ['time', 'value']}]}, {'series': [{ 'values': [['2009-11-10T23:00:00Z', 0.64]], 'measurement': 'cpu_load_short', 'columns': ['time', 'value']}]} ] } ], res ) influxdb-2.12.0/influxdb/tests/resultset_test.py0000644000175000017500000001300512652700251022616 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- import sys if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest from influxdb.exceptions import InfluxDBClientError from influxdb.resultset import ResultSet class TestResultSet(unittest.TestCase): def setUp(self): self.query_response = { "results": [ {"series": [{"measurement": "cpu_load_short", "tags": {"host": "server01", "region": "us-west"}, "columns": ["time", "value"], "values": [ ["2015-01-29T21:51:28.968422294Z", 
0.64] ]}, {"measurement": "cpu_load_short", "tags": {"host": "server02", "region": "us-west"}, "columns": ["time", "value"], "values": [ ["2015-01-29T21:51:28.968422294Z", 0.65] ]}, {"measurement": "other_serie", "tags": {"host": "server01", "region": "us-west"}, "columns": ["time", "value"], "values": [ ["2015-01-29T21:51:28.968422294Z", 0.66] ]}]} ] } self.rs = ResultSet(self.query_response['results'][0]) def test_filter_by_name(self): expected = [ {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}, {'value': 0.65, 'time': '2015-01-29T21:51:28.968422294Z'} ] self.assertEqual(expected, list(self.rs['cpu_load_short'])) self.assertEqual(expected, list(self.rs.get_points( measurement='cpu_load_short'))) def test_filter_by_tags(self): expected = [ {'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.64}, {'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.66} ] self.assertEqual( expected, list(self.rs[{"host": "server01"}]) ) self.assertEqual( expected, list(self.rs.get_points(tags={'host': 'server01'})) ) def test_filter_by_name_and_tags(self): self.assertEqual( list(self.rs[('cpu_load_short', {"host": "server01"})]), [{'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.64}] ) self.assertEqual( list(self.rs[('cpu_load_short', {"region": "us-west"})]), [ {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}, {'value': 0.65, 'time': '2015-01-29T21:51:28.968422294Z'} ] ) def test_keys(self): self.assertEqual( self.rs.keys(), [ ('cpu_load_short', {'host': 'server01', 'region': 'us-west'}), ('cpu_load_short', {'host': 'server02', 'region': 'us-west'}), ('other_serie', {'host': 'server01', 'region': 'us-west'}) ] ) def test_len(self): self.assertEqual( len(self.rs), 3 ) def test_items(self): items = list(self.rs.items()) items_lists = [(item[0], list(item[1])) for item in items] self.assertEqual( items_lists, [ ( ('cpu_load_short', {'host': 'server01', 'region': 'us-west'}), [{'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}] ), ( ('cpu_load_short', 
{'host': 'server02', 'region': 'us-west'}), [{'value': 0.65, 'time': '2015-01-29T21:51:28.968422294Z'}] ), ( ('other_serie', {'host': 'server01', 'region': 'us-west'}), [{'value': 0.66, 'time': '2015-01-29T21:51:28.968422294Z'}] ) ] ) def test_point_from_cols_vals(self): cols = ['col1', 'col2'] vals = [1, '2'] point = ResultSet.point_from_cols_vals(cols, vals) self.assertDictEqual( point, {'col1': 1, 'col2': '2'} ) def test_system_query(self): rs = ResultSet( {'series': [ {'values': [['another', '48h0m0s', 3, False], ['default', '0', 1, False], ['somename', '24h0m0s', 4, True]], 'columns': ['name', 'duration', 'replicaN', 'default']}]} ) self.assertEqual( rs.keys(), [('results', None)] ) self.assertEqual( list(rs['results']), [ {'duration': '48h0m0s', 'default': False, 'replicaN': 3, 'name': 'another'}, {'duration': '0', 'default': False, 'replicaN': 1, 'name': 'default'}, {'duration': '24h0m0s', 'default': True, 'replicaN': 4, 'name': 'somename'} ] ) def test_resultset_error(self): with self.assertRaises(InfluxDBClientError): ResultSet({ "series": [], "error": "Big error, many problems." 
}) influxdb-2.12.0/influxdb/tests/test_line_protocol.py0000644000175000017500000000267012652700251023442 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- import sys if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest from influxdb import line_protocol class TestLineProtocol(unittest.TestCase): def test_make_lines(self): data = { "tags": { "empty_tag": "", "none_tag": None, "integer_tag": 2, "string_tag": "hello" }, "points": [ { "measurement": "test", "fields": { "string_val": "hello!", "int_val": 1, "float_val": 1.1, "none_field": None, "bool_val": True, } } ] } self.assertEqual( line_protocol.make_lines(data), 'test,integer_tag=2,string_tag=hello ' 'bool_val=True,float_val=1.1,int_val=1i,string_val="hello!"\n' ) def test_string_val_newline(self): data = { "points": [ { "measurement": "m1", "fields": { "multi_line": "line1\nline1\nline3" } } ] } self.assertEqual( line_protocol.make_lines(data), 'm1 multi_line="line1\\nline1\\nline3"\n' ) influxdb-2.12.0/influxdb/tests/influxdb08/0000755000175000017500000000000012652700261021140 5ustar reazemreazem00000000000000influxdb-2.12.0/influxdb/tests/influxdb08/__init__.py0000644000175000017500000000003012652700251023241 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- influxdb-2.12.0/influxdb/tests/influxdb08/dataframe_client_test.py0000644000175000017500000003031512652700251026034 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- """ unit tests for misc module """ from .client_test import _mocked_session import sys if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest import json import requests_mock from nose.tools import raises from datetime import timedelta from influxdb.tests import skipIfPYpy, using_pypy import copy import warnings if not using_pypy: import pandas as pd from pandas.util.testing import assert_frame_equal from influxdb.influxdb08 import DataFrameClient @skipIfPYpy class TestDataFrameClient(unittest.TestCase): def 
setUp(self): # By default, raise exceptions on warnings warnings.simplefilter('error', FutureWarning) def test_write_points_from_dataframe(self): now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], index=[now, now + timedelta(hours=1)], columns=["column_one", "column_two", "column_three"]) points = [ { "points": [ ["1", 1, 1.0, 0], ["2", 2, 2.0, 3600] ], "name": "foo", "columns": ["column_one", "column_two", "column_three", "time"] } ] with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, "http://localhost:8086/db/db/series") cli = DataFrameClient(database='db') cli.write_points({"foo": dataframe}) self.assertListEqual(json.loads(m.last_request.body), points) def test_write_points_from_dataframe_with_float_nan(self): now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.DataFrame(data=[[1, float("NaN"), 1.0], [2, 2, 2.0]], index=[now, now + timedelta(hours=1)], columns=["column_one", "column_two", "column_three"]) points = [ { "points": [ [1, None, 1.0, 0], [2, 2, 2.0, 3600] ], "name": "foo", "columns": ["column_one", "column_two", "column_three", "time"] } ] with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, "http://localhost:8086/db/db/series") cli = DataFrameClient(database='db') cli.write_points({"foo": dataframe}) self.assertListEqual(json.loads(m.last_request.body), points) def test_write_points_from_dataframe_in_batches(self): now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], index=[now, now + timedelta(hours=1)], columns=["column_one", "column_two", "column_three"]) with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, "http://localhost:8086/db/db/series") cli = DataFrameClient(database='db') self.assertTrue(cli.write_points({"foo": dataframe}, batch_size=1)) def test_write_points_from_dataframe_with_numeric_column_names(self): now = pd.Timestamp('1970-01-01 00:00+00:00') # df with numeric 
column names dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], index=[now, now + timedelta(hours=1)]) points = [ { "points": [ ["1", 1, 1.0, 0], ["2", 2, 2.0, 3600] ], "name": "foo", "columns": ['0', '1', '2', "time"] } ] with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, "http://localhost:8086/db/db/series") cli = DataFrameClient(database='db') cli.write_points({"foo": dataframe}) self.assertListEqual(json.loads(m.last_request.body), points) def test_write_points_from_dataframe_with_period_index(self): dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], index=[pd.Period('1970-01-01'), pd.Period('1970-01-02')], columns=["column_one", "column_two", "column_three"]) points = [ { "points": [ ["1", 1, 1.0, 0], ["2", 2, 2.0, 86400] ], "name": "foo", "columns": ["column_one", "column_two", "column_three", "time"] } ] with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, "http://localhost:8086/db/db/series") cli = DataFrameClient(database='db') cli.write_points({"foo": dataframe}) self.assertListEqual(json.loads(m.last_request.body), points) def test_write_points_from_dataframe_with_time_precision(self): now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], index=[now, now + timedelta(hours=1)], columns=["column_one", "column_two", "column_three"]) points = [ { "points": [ ["1", 1, 1.0, 0], ["2", 2, 2.0, 3600] ], "name": "foo", "columns": ["column_one", "column_two", "column_three", "time"] } ] points_ms = copy.deepcopy(points) points_ms[0]["points"][1][-1] = 3600 * 1000 points_us = copy.deepcopy(points) points_us[0]["points"][1][-1] = 3600 * 1000000 with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, "http://localhost:8086/db/db/series") cli = DataFrameClient(database='db') cli.write_points({"foo": dataframe}, time_precision='s') self.assertListEqual(json.loads(m.last_request.body), points) cli.write_points({"foo": dataframe}, time_precision='m') 
self.assertListEqual(json.loads(m.last_request.body), points_ms) cli.write_points({"foo": dataframe}, time_precision='u') self.assertListEqual(json.loads(m.last_request.body), points_us) @raises(TypeError) def test_write_points_from_dataframe_fails_without_time_index(self): dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], columns=["column_one", "column_two", "column_three"]) with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, "http://localhost:8086/db/db/series") cli = DataFrameClient(database='db') cli.write_points({"foo": dataframe}) @raises(TypeError) def test_write_points_from_dataframe_fails_with_series(self): now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.Series(data=[1.0, 2.0], index=[now, now + timedelta(hours=1)]) with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, "http://localhost:8086/db/db/series") cli = DataFrameClient(database='db') cli.write_points({"foo": dataframe}) def test_query_into_dataframe(self): data = [ { "name": "foo", "columns": ["time", "sequence_number", "column_one"], "points": [ [3600, 16, 2], [3600, 15, 1], [0, 14, 2], [0, 13, 1] ] } ] # dataframe sorted ascending by time first, then sequence_number dataframe = pd.DataFrame(data=[[13, 1], [14, 2], [15, 1], [16, 2]], index=pd.to_datetime([0, 0, 3600, 3600], unit='s', utc=True), columns=['sequence_number', 'column_one']) with _mocked_session('get', 200, data): cli = DataFrameClient('host', 8086, 'username', 'password', 'db') result = cli.query('select column_one from foo;') assert_frame_equal(dataframe, result) def test_query_multiple_time_series(self): data = [ { "name": "series1", "columns": ["time", "mean", "min", "max", "stddev"], "points": [[0, 323048, 323048, 323048, 0]] }, { "name": "series2", "columns": ["time", "mean", "min", "max", "stddev"], "points": [[0, -2.8233, -2.8503, -2.7832, 0.0173]] }, { "name": "series3", "columns": ["time", "mean", "min", "max", "stddev"], "points": [[0, -0.01220, -0.01220, -0.01220, 0]] 
} ] dataframes = { 'series1': pd.DataFrame(data=[[323048, 323048, 323048, 0]], index=pd.to_datetime([0], unit='s', utc=True), columns=['mean', 'min', 'max', 'stddev']), 'series2': pd.DataFrame(data=[[-2.8233, -2.8503, -2.7832, 0.0173]], index=pd.to_datetime([0], unit='s', utc=True), columns=['mean', 'min', 'max', 'stddev']), 'series3': pd.DataFrame(data=[[-0.01220, -0.01220, -0.01220, 0]], index=pd.to_datetime([0], unit='s', utc=True), columns=['mean', 'min', 'max', 'stddev']) } with _mocked_session('get', 200, data): cli = DataFrameClient('host', 8086, 'username', 'password', 'db') result = cli.query("""select mean(value), min(value), max(value), stddev(value) from series1, series2, series3""") self.assertEqual(dataframes.keys(), result.keys()) for key in dataframes.keys(): assert_frame_equal(dataframes[key], result[key]) def test_query_with_empty_result(self): with _mocked_session('get', 200, []): cli = DataFrameClient('host', 8086, 'username', 'password', 'db') result = cli.query('select column_one from foo;') self.assertEqual(result, []) def test_list_series(self): response = [ { 'columns': ['time', 'name'], 'name': 'list_series_result', 'points': [[0, 'seriesA'], [0, 'seriesB']] } ] with _mocked_session('get', 200, response): cli = DataFrameClient('host', 8086, 'username', 'password', 'db') series_list = cli.get_list_series() self.assertEqual(series_list, ['seriesA', 'seriesB']) def test_datetime_to_epoch(self): timestamp = pd.Timestamp('2013-01-01 00:00:00.000+00:00') cli = DataFrameClient('host', 8086, 'username', 'password', 'db') self.assertEqual( cli._datetime_to_epoch(timestamp), 1356998400.0 ) self.assertEqual( cli._datetime_to_epoch(timestamp, time_precision='s'), 1356998400.0 ) self.assertEqual( cli._datetime_to_epoch(timestamp, time_precision='m'), 1356998400000.0 ) self.assertEqual( cli._datetime_to_epoch(timestamp, time_precision='ms'), 1356998400000.0 ) self.assertEqual( cli._datetime_to_epoch(timestamp, time_precision='u'), 1356998400000000.0 ) 
influxdb-2.12.0/influxdb/tests/influxdb08/client_test.py0000644000175000017500000006667712652700251024054 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- """ unit tests """ import json import requests import requests.exceptions import socket import sys if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest import requests_mock import random from nose.tools import raises from mock import patch import warnings import mock from influxdb.influxdb08 import InfluxDBClient from influxdb.influxdb08.client import session import sys if sys.version < '3': import codecs def u(x): return codecs.unicode_escape_decode(x)[0] else: def u(x): return x def _build_response_object(status_code=200, content=""): resp = requests.Response() resp.status_code = status_code resp._content = content.encode("utf8") return resp def _mocked_session(method="GET", status_code=200, content=""): method = method.upper() def request(*args, **kwargs): c = content # Check method assert method == kwargs.get('method', 'GET') if method == 'POST': data = kwargs.get('data', None) if data is not None: # Data must be a string assert isinstance(data, str) # Data must be a JSON string assert c == json.loads(data, strict=True) c = data # Anyway, Content must be a JSON string (or empty string) if not isinstance(c, str): c = json.dumps(c) return _build_response_object(status_code=status_code, content=c) mocked = patch.object( session, 'request', side_effect=request ) return mocked class TestInfluxDBClient(unittest.TestCase): def setUp(self): # By default, raise exceptions on warnings warnings.simplefilter('error', FutureWarning) self.dummy_points = [ { "points": [ ["1", 1, 1.0], ["2", 2, 2.0] ], "name": "foo", "columns": ["column_one", "column_two", "column_three"] } ] self.dsn_string = 'influxdb://uSr:pWd@host:1886/db' def test_scheme(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') self.assertEqual(cli._baseurl, 'http://host:8086') cli = 
InfluxDBClient( 'host', 8086, 'username', 'password', 'database', ssl=True ) self.assertEqual(cli._baseurl, 'https://host:8086') def test_dsn(self): cli = InfluxDBClient.from_DSN(self.dsn_string) self.assertEqual('http://host:1886', cli._baseurl) self.assertEqual('uSr', cli._username) self.assertEqual('pWd', cli._password) self.assertEqual('db', cli._database) self.assertFalse(cli.use_udp) cli = InfluxDBClient.from_DSN('udp+' + self.dsn_string) self.assertTrue(cli.use_udp) cli = InfluxDBClient.from_DSN('https+' + self.dsn_string) self.assertEqual('https://host:1886', cli._baseurl) cli = InfluxDBClient.from_DSN('https+' + self.dsn_string, **{'ssl': False}) self.assertEqual('http://host:1886', cli._baseurl) def test_switch_database(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') cli.switch_database('another_database') self.assertEqual(cli._database, 'another_database') @raises(FutureWarning) def test_switch_db_deprecated(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') cli.switch_db('another_database') self.assertEqual(cli._database, 'another_database') def test_switch_user(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') cli.switch_user('another_username', 'another_password') self.assertEqual(cli._username, 'another_username') self.assertEqual(cli._password, 'another_password') def test_write(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, "http://localhost:8086/write" ) cli = InfluxDBClient(database='db') cli.write( {"database": "mydb", "retentionPolicy": "mypolicy", "points": [{"name": "cpu_load_short", "tags": {"host": "server01", "region": "us-west"}, "timestamp": "2009-11-10T23:00:00Z", "values": {"value": 0.64}}]} ) self.assertEqual( json.loads(m.last_request.body), {"database": "mydb", "retentionPolicy": "mypolicy", "points": [{"name": "cpu_load_short", "tags": {"host": "server01", "region": "us-west"}, "timestamp": "2009-11-10T23:00:00Z", 
"values": {"value": 0.64}}]} ) def test_write_points(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, "http://localhost:8086/db/db/series" ) cli = InfluxDBClient(database='db') cli.write_points( self.dummy_points ) self.assertListEqual( json.loads(m.last_request.body), self.dummy_points ) def test_write_points_string(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, "http://localhost:8086/db/db/series" ) cli = InfluxDBClient(database='db') cli.write_points( str(json.dumps(self.dummy_points)) ) self.assertListEqual( json.loads(m.last_request.body), self.dummy_points ) def test_write_points_batch(self): with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, "http://localhost:8086/db/db/series") cli = InfluxDBClient('localhost', 8086, 'username', 'password', 'db') cli.write_points(data=self.dummy_points, batch_size=2) self.assertEqual(1, m.call_count) def test_write_points_batch_invalid_size(self): with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, "http://localhost:8086/db/db/series") cli = InfluxDBClient('localhost', 8086, 'username', 'password', 'db') cli.write_points(data=self.dummy_points, batch_size=-2) self.assertEqual(1, m.call_count) def test_write_points_batch_multiple_series(self): dummy_points = [ {"points": [["1", 1, 1.0], ["2", 2, 2.0], ["3", 3, 3.0], ["4", 4, 4.0], ["5", 5, 5.0]], "name": "foo", "columns": ["val1", "val2", "val3"]}, {"points": [["1", 1, 1.0], ["2", 2, 2.0], ["3", 3, 3.0], ["4", 4, 4.0], ["5", 5, 5.0], ["6", 6, 6.0], ["7", 7, 7.0], ["8", 8, 8.0]], "name": "bar", "columns": ["val1", "val2", "val3"]}, ] expected_last_body = [{'points': [['7', 7, 7.0], ['8', 8, 8.0]], 'name': 'bar', 'columns': ['val1', 'val2', 'val3']}] with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, "http://localhost:8086/db/db/series") cli = InfluxDBClient('localhost', 8086, 'username', 'password', 'db') cli.write_points(data=dummy_points, batch_size=3) 
self.assertEqual(m.call_count, 5) self.assertEqual(expected_last_body, m.request_history[4].json()) def test_write_points_udp(self): s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) port = random.randint(4000, 8000) s.bind(('0.0.0.0', port)) cli = InfluxDBClient( 'localhost', 8086, 'root', 'root', 'test', use_udp=True, udp_port=port ) cli.write_points(self.dummy_points) received_data, addr = s.recvfrom(1024) self.assertEqual(self.dummy_points, json.loads(received_data.decode(), strict=True)) def test_write_bad_precision_udp(self): cli = InfluxDBClient( 'localhost', 8086, 'root', 'root', 'test', use_udp=True, udp_port=4444 ) with self.assertRaisesRegexp( Exception, "InfluxDB only supports seconds precision for udp writes" ): cli.write_points( self.dummy_points, time_precision='ms' ) @raises(Exception) def test_write_points_fails(self): with _mocked_session('post', 500): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.write_points([]) def test_write_points_with_precision(self): with _mocked_session('post', 200, self.dummy_points): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') self.assertTrue(cli.write_points(self.dummy_points)) def test_write_points_bad_precision(self): cli = InfluxDBClient() with self.assertRaisesRegexp( Exception, "Invalid time precision is given. 
\(use 's', 'm', 'ms' or 'u'\)" ): cli.write_points( self.dummy_points, time_precision='g' ) @raises(Exception) def test_write_points_with_precision_fails(self): with _mocked_session('post', 500): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.write_points_with_precision([]) def test_delete_points(self): with _mocked_session('delete', 204) as mocked: cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') self.assertTrue(cli.delete_points("foo")) self.assertEqual(len(mocked.call_args_list), 1) args, kwds = mocked.call_args_list[0] self.assertEqual(kwds['params'], {'u': 'username', 'p': 'password'}) self.assertEqual(kwds['url'], 'http://host:8086/db/db/series/foo') @raises(Exception) def test_delete_points_with_wrong_name(self): with _mocked_session('delete', 400): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.delete_points("nonexist") @raises(NotImplementedError) def test_create_scheduled_delete(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.create_scheduled_delete([]) @raises(NotImplementedError) def test_get_list_scheduled_delete(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.get_list_scheduled_delete() @raises(NotImplementedError) def test_remove_scheduled_delete(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.remove_scheduled_delete(1) def test_query(self): data = [ { "name": "foo", "columns": ["time", "sequence_number", "column_one"], "points": [ [1383876043, 16, "2"], [1383876043, 15, "1"], [1383876035, 14, "2"], [1383876035, 13, "1"] ] } ] with _mocked_session('get', 200, data): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') result = cli.query('select column_one from foo;') self.assertEqual(len(result[0]['points']), 4) def test_query_chunked(self): cli = InfluxDBClient(database='db') example_object = { 'points': [ [1415206250119, 40001, 667], [1415206244555, 30001, 7], [1415206228241, 20001, 788], 
[1415206212980, 10001, 555], [1415197271586, 10001, 23] ], 'name': 'foo', 'columns': [ 'time', 'sequence_number', 'val' ] } example_response = \ json.dumps(example_object) + json.dumps(example_object) with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, "http://localhost:8086/db/db/series", text=example_response ) self.assertListEqual( cli.query('select * from foo', chunked=True), [example_object, example_object] ) def test_query_chunked_unicode(self): cli = InfluxDBClient(database='db') example_object = { 'points': [ [1415206212980, 10001, u('unicode-\xcf\x89')], [1415197271586, 10001, u('more-unicode-\xcf\x90')] ], 'name': 'foo', 'columns': [ 'time', 'sequence_number', 'val' ] } example_response = \ json.dumps(example_object) + json.dumps(example_object) with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, "http://localhost:8086/db/db/series", text=example_response ) self.assertListEqual( cli.query('select * from foo', chunked=True), [example_object, example_object] ) @raises(Exception) def test_query_fail(self): with _mocked_session('get', 401): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.query('select column_one from foo;') def test_query_bad_precision(self): cli = InfluxDBClient() with self.assertRaisesRegexp( Exception, "Invalid time precision is given. 
\(use 's', 'm', 'ms' or 'u'\)" ): cli.query('select column_one from foo', time_precision='g') def test_create_database(self): with _mocked_session('post', 201, {"name": "new_db"}): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') self.assertTrue(cli.create_database('new_db')) @raises(Exception) def test_create_database_fails(self): with _mocked_session('post', 401): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.create_database('new_db') def test_delete_database(self): with _mocked_session('delete', 204): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') self.assertTrue(cli.delete_database('old_db')) @raises(Exception) def test_delete_database_fails(self): with _mocked_session('delete', 401): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.delete_database('old_db') def test_get_list_database(self): data = [ {"name": "a_db"} ] with _mocked_session('get', 200, data): cli = InfluxDBClient('host', 8086, 'username', 'password') self.assertEqual(len(cli.get_list_database()), 1) self.assertEqual(cli.get_list_database()[0]['name'], 'a_db') @raises(Exception) def test_get_list_database_fails(self): with _mocked_session('get', 401): cli = InfluxDBClient('host', 8086, 'username', 'password') cli.get_list_database() @raises(FutureWarning) def test_get_database_list_deprecated(self): data = [ {"name": "a_db"} ] with _mocked_session('get', 200, data): cli = InfluxDBClient('host', 8086, 'username', 'password') self.assertEqual(len(cli.get_database_list()), 1) self.assertEqual(cli.get_database_list()[0]['name'], 'a_db') def test_delete_series(self): with _mocked_session('delete', 204): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.delete_series('old_series') @raises(Exception) def test_delete_series_fails(self): with _mocked_session('delete', 401): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.delete_series('old_series') def test_get_series_list(self): cli = 
InfluxDBClient(database='db') with requests_mock.Mocker() as m: example_response = \ '[{"name":"list_series_result","columns":' \ '["time","name"],"points":[[0,"foo"],[0,"bar"]]}]' m.register_uri( requests_mock.GET, "http://localhost:8086/db/db/series", text=example_response ) self.assertListEqual( cli.get_list_series(), ['foo', 'bar'] ) def test_get_continuous_queries(self): cli = InfluxDBClient(database='db') with requests_mock.Mocker() as m: # Tip: put this in a json linter! example_response = '[ { "name": "continuous queries", "columns"' \ ': [ "time", "id", "query" ], "points": [ [ ' \ '0, 1, "select foo(bar,95) from \\"foo_bar' \ 's\\" group by time(5m) into response_times.' \ 'percentiles.5m.95" ], [ 0, 2, "select perce' \ 'ntile(value,95) from \\"response_times\\" g' \ 'roup by time(5m) into response_times.percen' \ 'tiles.5m.95" ] ] } ]' m.register_uri( requests_mock.GET, "http://localhost:8086/db/db/series", text=example_response ) self.assertListEqual( cli.get_list_continuous_queries(), [ 'select foo(bar,95) from "foo_bars" group ' 'by time(5m) into response_times.percentiles.5m.95', 'select percentile(value,95) from "response_times" group ' 'by time(5m) into response_times.percentiles.5m.95' ] ) def test_get_list_cluster_admins(self): pass def test_add_cluster_admin(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, "http://localhost:8086/cluster_admins" ) cli = InfluxDBClient(database='db') cli.add_cluster_admin( new_username='paul', new_password='laup' ) self.assertDictEqual( json.loads(m.last_request.body), { 'name': 'paul', 'password': 'laup' } ) def test_update_cluster_admin_password(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, "http://localhost:8086/cluster_admins/paul" ) cli = InfluxDBClient(database='db') cli.update_cluster_admin_password( username='paul', new_password='laup' ) self.assertDictEqual( json.loads(m.last_request.body), {'password': 'laup'} ) def 
test_delete_cluster_admin(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.DELETE, "http://localhost:8086/cluster_admins/paul", status_code=200, ) cli = InfluxDBClient(database='db') cli.delete_cluster_admin(username='paul') self.assertIsNone(m.last_request.body) def test_set_database_admin(self): pass def test_unset_database_admin(self): pass def test_alter_database_admin(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, "http://localhost:8086/db/db/users/paul" ) cli = InfluxDBClient(database='db') cli.alter_database_admin( username='paul', is_admin=False ) self.assertDictEqual( json.loads(m.last_request.body), { 'admin': False } ) @raises(NotImplementedError) def test_get_list_database_admins(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.get_list_database_admins() @raises(NotImplementedError) def test_add_database_admin(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.add_database_admin('admin', 'admin_secret_password') @raises(NotImplementedError) def test_update_database_admin_password(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.update_database_admin_password('admin', 'admin_secret_password') @raises(NotImplementedError) def test_delete_database_admin(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.delete_database_admin('admin') def test_get_database_users(self): cli = InfluxDBClient('localhost', 8086, 'username', 'password', 'db') example_response = \ '[{"name":"paul","isAdmin":false,"writeTo":".*","readFrom":".*"},'\ '{"name":"bobby","isAdmin":false,"writeTo":".*","readFrom":".*"}]' with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, "http://localhost:8086/db/db/users", text=example_response ) users = cli.get_database_users() self.assertEqual(json.loads(example_response), users) def test_add_database_user(self): with requests_mock.Mocker() as m: m.register_uri( 
requests_mock.POST, "http://localhost:8086/db/db/users" ) cli = InfluxDBClient(database='db') cli.add_database_user( new_username='paul', new_password='laup', permissions=('.*', '.*') ) self.assertDictEqual( json.loads(m.last_request.body), { 'writeTo': '.*', 'password': 'laup', 'readFrom': '.*', 'name': 'paul' } ) def test_add_database_user_bad_permissions(self): cli = InfluxDBClient() with self.assertRaisesRegexp( Exception, "'permissions' must be \(readFrom, writeTo\) tuple" ): cli.add_database_user( new_password='paul', new_username='paul', permissions=('hello', 'hello', 'hello') ) def test_alter_database_user_password(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, "http://localhost:8086/db/db/users/paul" ) cli = InfluxDBClient(database='db') cli.alter_database_user( username='paul', password='n3wp4ss!' ) self.assertDictEqual( json.loads(m.last_request.body), { 'password': 'n3wp4ss!' } ) def test_alter_database_user_permissions(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, "http://localhost:8086/db/db/users/paul" ) cli = InfluxDBClient(database='db') cli.alter_database_user( username='paul', permissions=('^$', '.*') ) self.assertDictEqual( json.loads(m.last_request.body), { 'readFrom': '^$', 'writeTo': '.*' } ) def test_alter_database_user_password_and_permissions(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, "http://localhost:8086/db/db/users/paul" ) cli = InfluxDBClient(database='db') cli.alter_database_user( username='paul', password='n3wp4ss!', permissions=('^$', '.*') ) self.assertDictEqual( json.loads(m.last_request.body), { 'password': 'n3wp4ss!', 'readFrom': '^$', 'writeTo': '.*' } ) def test_update_database_user_password_current_user(self): cli = InfluxDBClient( username='root', password='hello', database='database' ) with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, "http://localhost:8086/db/database/users/root" ) 
cli.update_database_user_password( username='root', new_password='bye' ) self.assertEqual(cli._password, 'bye') def test_delete_database_user(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.DELETE, "http://localhost:8086/db/db/users/paul" ) cli = InfluxDBClient(database='db') cli.delete_database_user(username='paul') self.assertIsNone(m.last_request.body) @raises(NotImplementedError) def test_update_permission(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') cli.update_permission('admin', []) @mock.patch('requests.Session.request') def test_request_retry(self, mock_request): """Tests that two connection errors will be handled""" class CustomMock(object): i = 0 def connection_error(self, *args, **kwargs): self.i += 1 if self.i < 3: raise requests.exceptions.ConnectionError else: r = requests.Response() r.status_code = 200 return r mock_request.side_effect = CustomMock().connection_error cli = InfluxDBClient(database='db') cli.write_points( self.dummy_points ) @mock.patch('requests.Session.request') def test_request_retry_raises(self, mock_request): """Tests that three connection errors will not be handled""" class CustomMock(object): i = 0 def connection_error(self, *args, **kwargs): self.i += 1 if self.i < 4: raise requests.exceptions.ConnectionError else: r = requests.Response() r.status_code = 200 return r mock_request.side_effect = CustomMock().connection_error cli = InfluxDBClient(database='db') with self.assertRaises(requests.exceptions.ConnectionError): cli.write_points(self.dummy_points) influxdb-2.12.0/influxdb/tests/influxdb08/helper_test.py0000644000175000017500000002011112652700251024022 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- import sys if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest import warnings import mock from influxdb.influxdb08 import SeriesHelper, InfluxDBClient from requests.exceptions import ConnectionError class 
TestSeriesHelper(unittest.TestCase): @classmethod def setUpClass(cls): super(TestSeriesHelper, cls).setUpClass() TestSeriesHelper.client = InfluxDBClient( 'host', 8086, 'username', 'password', 'database' ) class MySeriesHelper(SeriesHelper): class Meta: client = TestSeriesHelper.client series_name = 'events.stats.{server_name}' fields = ['time', 'server_name'] bulk_size = 5 autocommit = True TestSeriesHelper.MySeriesHelper = MySeriesHelper def test_auto_commit(self): """ Tests that write_points is called after the right number of events """ class AutoCommitTest(SeriesHelper): class Meta: series_name = 'events.stats.{server_name}' fields = ['time', 'server_name'] bulk_size = 5 client = InfluxDBClient() autocommit = True fake_write_points = mock.MagicMock() AutoCommitTest(server_name='us.east-1', time=159) AutoCommitTest._client.write_points = fake_write_points AutoCommitTest(server_name='us.east-1', time=158) AutoCommitTest(server_name='us.east-1', time=157) AutoCommitTest(server_name='us.east-1', time=156) self.assertFalse(fake_write_points.called) AutoCommitTest(server_name='us.east-1', time=3443) self.assertTrue(fake_write_points.called) def testSingleSeriesName(self): """ Tests JSON conversion when there is only one series name. 
""" TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=159) TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=158) TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=157) TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=156) expectation = [{'points': [[159, 'us.east-1'], [158, 'us.east-1'], [157, 'us.east-1'], [156, 'us.east-1']], 'name': 'events.stats.us.east-1', 'columns': ['time', 'server_name']}] rcvd = TestSeriesHelper.MySeriesHelper._json_body_() self.assertTrue(all([el in expectation for el in rcvd]) and all([el in rcvd for el in expectation]), 'Invalid JSON body of time series returned from ' '_json_body_ for one series name: {0}.'.format(rcvd)) TestSeriesHelper.MySeriesHelper._reset_() self.assertEqual( TestSeriesHelper.MySeriesHelper._json_body_(), [], 'Resetting helper did not empty datapoints.') def testSeveralSeriesNames(self): ''' Tests JSON conversion when there is only one series name. ''' TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=159) TestSeriesHelper.MySeriesHelper(server_name='fr.paris-10', time=158) TestSeriesHelper.MySeriesHelper(server_name='lu.lux', time=157) TestSeriesHelper.MySeriesHelper(server_name='uk.london', time=156) expectation = [{'points': [[157, 'lu.lux']], 'name': 'events.stats.lu.lux', 'columns': ['time', 'server_name']}, {'points': [[156, 'uk.london']], 'name': 'events.stats.uk.london', 'columns': ['time', 'server_name']}, {'points': [[158, 'fr.paris-10']], 'name': 'events.stats.fr.paris-10', 'columns': ['time', 'server_name']}, {'points': [[159, 'us.east-1']], 'name': 'events.stats.us.east-1', 'columns': ['time', 'server_name']}] rcvd = TestSeriesHelper.MySeriesHelper._json_body_() self.assertTrue(all([el in expectation for el in rcvd]) and all([el in rcvd for el in expectation]), 'Invalid JSON body of time series returned from ' '_json_body_ for several series names: {0}.' 
.format(rcvd)) TestSeriesHelper.MySeriesHelper._reset_() self.assertEqual( TestSeriesHelper.MySeriesHelper._json_body_(), [], 'Resetting helper did not empty datapoints.') def testInvalidHelpers(self): ''' Tests errors in invalid helpers. ''' class MissingMeta(SeriesHelper): pass class MissingClient(SeriesHelper): class Meta: series_name = 'events.stats.{server_name}' fields = ['time', 'server_name'] autocommit = True class MissingSeriesName(SeriesHelper): class Meta: fields = ['time', 'server_name'] class MissingFields(SeriesHelper): class Meta: series_name = 'events.stats.{server_name}' for cls in [MissingMeta, MissingClient, MissingFields, MissingSeriesName]: self.assertRaises( AttributeError, cls, **{'time': 159, 'server_name': 'us.east-1'}) def testWarnBulkSizeZero(self): """ Tests warning for an invalid bulk size. """ class WarnBulkSizeZero(SeriesHelper): class Meta: client = TestSeriesHelper.client series_name = 'events.stats.{server_name}' fields = ['time', 'server_name'] bulk_size = 0 autocommit = True with warnings.catch_warnings(record=True) as rec_warnings: warnings.simplefilter("always") # Server defined in the client is invalid, we're testing # the warning only. with self.assertRaises(ConnectionError): WarnBulkSizeZero(time=159, server_name='us.east-1') self.assertGreaterEqual( len(rec_warnings), 1, '{0} call should have generated one warning.' 'Actual generated warnings: {1}'.format( WarnBulkSizeZero, '\n'.join(map(str, rec_warnings)))) expected_msg = ( 'Definition of bulk_size in WarnBulkSizeZero forced to 1, ' 'was less than 1.') self.assertIn(expected_msg, list(w.message.args[0] for w in rec_warnings), 'Warning message did not contain "forced to 1".') def testWarnBulkSizeNoEffect(self): """ Tests warning for a set bulk size but autocommit False. 
""" class WarnBulkSizeNoEffect(SeriesHelper): class Meta: series_name = 'events.stats.{server_name}' fields = ['time', 'server_name'] bulk_size = 5 autocommit = False with warnings.catch_warnings(record=True) as rec_warnings: warnings.simplefilter("always") WarnBulkSizeNoEffect(time=159, server_name='us.east-1') self.assertGreaterEqual( len(rec_warnings), 1, '{0} call should have generated one warning.' 'Actual generated warnings: {1}'.format( WarnBulkSizeNoEffect, '\n'.join(map(str, rec_warnings)))) expected_msg = ( 'Definition of bulk_size in WarnBulkSizeNoEffect has no affect ' 'because autocommit is false.') self.assertIn(expected_msg, list(w.message.args[0] for w in rec_warnings), 'Warning message did not contain the expected_msg.') if __name__ == '__main__': unittest.main() influxdb-2.12.0/influxdb/tests/dataframe_client_test.py0000644000175000017500000003217512652700251024057 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- """ unit tests for misc module """ from .client_test import _mocked_session import sys if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest import json import requests_mock from nose.tools import raises from datetime import timedelta from influxdb.tests import skipIfPYpy, using_pypy import warnings if not using_pypy: import pandas as pd from pandas.util.testing import assert_frame_equal from influxdb import DataFrameClient @skipIfPYpy class TestDataFrameClient(unittest.TestCase): def setUp(self): # By default, raise exceptions on warnings warnings.simplefilter('error', FutureWarning) def test_write_points_from_dataframe(self): now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], index=[now, now + timedelta(hours=1)], columns=["column_one", "column_two", "column_three"]) expected = ( b"foo column_one=\"1\",column_three=1.0,column_two=1i 0\n" b"foo column_one=\"2\",column_three=2.0,column_two=2i " b"3600000000000\n" ) with requests_mock.Mocker() as m: 
m.register_uri(requests_mock.POST, "http://localhost:8086/write", status_code=204) cli = DataFrameClient(database='db') cli.write_points(dataframe, 'foo') self.assertEqual(m.last_request.body, expected) cli.write_points(dataframe, 'foo', tags=None) self.assertEqual(m.last_request.body, expected) def test_write_points_from_dataframe_in_batches(self): now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], index=[now, now + timedelta(hours=1)], columns=["column_one", "column_two", "column_three"]) with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, "http://localhost:8086/write", status_code=204) cli = DataFrameClient(database='db') self.assertTrue(cli.write_points(dataframe, "foo", batch_size=1)) def test_write_points_from_dataframe_with_numeric_column_names(self): now = pd.Timestamp('1970-01-01 00:00+00:00') # df with numeric column names dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], index=[now, now + timedelta(hours=1)]) expected = ( b'foo,hello=there 0=\"1\",1=1i,2=1.0 0\n' b'foo,hello=there 0=\"2\",1=2i,2=2.0 3600000000000\n' ) with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, "http://localhost:8086/write", status_code=204) cli = DataFrameClient(database='db') cli.write_points(dataframe, "foo", {"hello": "there"}) self.assertEqual(m.last_request.body, expected) def test_write_points_from_dataframe_with_period_index(self): dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], index=[pd.Period('1970-01-01'), pd.Period('1970-01-02')], columns=["column_one", "column_two", "column_three"]) expected = ( b"foo column_one=\"1\",column_three=1.0,column_two=1i 0\n" b"foo column_one=\"2\",column_three=2.0,column_two=2i " b"86400000000000\n" ) with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, "http://localhost:8086/write", status_code=204) cli = DataFrameClient(database='db') cli.write_points(dataframe, "foo") self.assertEqual(m.last_request.body, 
expected) def test_write_points_from_dataframe_with_time_precision(self): now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], index=[now, now + timedelta(hours=1)], columns=["column_one", "column_two", "column_three"]) with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, "http://localhost:8086/write", status_code=204) cli = DataFrameClient(database='db') measurement = "foo" cli.write_points(dataframe, measurement, time_precision='h') self.assertEqual(m.last_request.qs['precision'], ['h']) self.assertEqual( b'foo column_one="1",column_three=1.0,column_two=1i 0\nfoo ' b'column_one="2",column_three=2.0,column_two=2i 1\n', m.last_request.body, ) cli.write_points(dataframe, measurement, time_precision='m') self.assertEqual(m.last_request.qs['precision'], ['m']) self.assertEqual( b'foo column_one="1",column_three=1.0,column_two=1i 0\nfoo ' b'column_one="2",column_three=2.0,column_two=2i 60\n', m.last_request.body, ) cli.write_points(dataframe, measurement, time_precision='s') self.assertEqual(m.last_request.qs['precision'], ['s']) self.assertEqual( b'foo column_one="1",column_three=1.0,column_two=1i 0\nfoo ' b'column_one="2",column_three=2.0,column_two=2i 3600\n', m.last_request.body, ) cli.write_points(dataframe, measurement, time_precision='ms') self.assertEqual(m.last_request.qs['precision'], ['ms']) self.assertEqual( b'foo column_one="1",column_three=1.0,column_two=1i 0\nfoo ' b'column_one="2",column_three=2.0,column_two=2i 3600000\n', m.last_request.body, ) cli.write_points(dataframe, measurement, time_precision='u') self.assertEqual(m.last_request.qs['precision'], ['u']) self.assertEqual( b'foo column_one="1",column_three=1.0,column_two=1i 0\nfoo ' b'column_one="2",column_three=2.0,column_two=2i 3600000000\n', m.last_request.body, ) cli.write_points(dataframe, measurement, time_precision='n') self.assertEqual(m.last_request.qs['precision'], ['n']) self.assertEqual( b'foo 
column_one="1",column_three=1.0,column_two=1i 0\n' b'foo column_one="2",column_three=2.0,column_two=2i ' b'3600000000000\n', m.last_request.body, ) @raises(TypeError) def test_write_points_from_dataframe_fails_without_time_index(self): dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]], columns=["column_one", "column_two", "column_three"]) with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, "http://localhost:8086/db/db/series", status_code=204) cli = DataFrameClient(database='db') cli.write_points(dataframe, "foo") @raises(TypeError) def test_write_points_from_dataframe_fails_with_series(self): now = pd.Timestamp('1970-01-01 00:00+00:00') dataframe = pd.Series(data=[1.0, 2.0], index=[now, now + timedelta(hours=1)]) with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, "http://localhost:8086/db/db/series", status_code=204) cli = DataFrameClient(database='db') cli.write_points(dataframe, "foo") def test_query_into_dataframe(self): data = { "results": [{ "series": [ {"measurement": "network", "tags": {"direction": ""}, "columns": ["time", "value"], "values":[["2009-11-10T23:00:00Z", 23422]] }, {"measurement": "network", "tags": {"direction": "in"}, "columns": ["time", "value"], "values": [["2009-11-10T23:00:00Z", 23422], ["2009-11-10T23:00:00Z", 23422], ["2009-11-10T23:00:00Z", 23422]] } ] }] } pd1 = pd.DataFrame( [[23422]], columns=['value'], index=pd.to_datetime(["2009-11-10T23:00:00Z"])) pd1.index = pd1.index.tz_localize('UTC') pd2 = pd.DataFrame( [[23422], [23422], [23422]], columns=['value'], index=pd.to_datetime(["2009-11-10T23:00:00Z", "2009-11-10T23:00:00Z", "2009-11-10T23:00:00Z"])) pd2.index = pd2.index.tz_localize('UTC') expected = { ('network', (('direction', ''),)): pd1, ('network', (('direction', 'in'),)): pd2 } cli = DataFrameClient('host', 8086, 'username', 'password', 'db') with _mocked_session(cli, 'GET', 200, data): result = cli.query('select value from network group by direction;') for k in expected: 
assert_frame_equal(expected[k], result[k]) def test_query_with_empty_result(self): cli = DataFrameClient('host', 8086, 'username', 'password', 'db') with _mocked_session(cli, 'GET', 200, {"results": [{}]}): result = cli.query('select column_one from foo;') self.assertEqual(result, {}) def test_list_series(self): response = { 'results': [ {'series': [ { 'columns': ['host'], 'measurement': 'cpu', 'values': [ ['server01']] }, { 'columns': [ 'host', 'region' ], 'measurement': 'network', 'values': [ [ 'server01', 'us-west' ], [ 'server01', 'us-east' ] ] } ]} ] } expected = { 'cpu': pd.DataFrame([['server01']], columns=['host']), 'network': pd.DataFrame( [['server01', 'us-west'], ['server01', 'us-east']], columns=['host', 'region'])} cli = DataFrameClient('host', 8086, 'username', 'password', 'db') with _mocked_session(cli, 'GET', 200, response): series = cli.get_list_series() assert_frame_equal(series['cpu'], expected['cpu']) assert_frame_equal(series['network'], expected['network']) def test_get_list_database(self): data = {'results': [ {'series': [ {'measurement': 'databases', 'values': [ ['new_db_1'], ['new_db_2']], 'columns': ['name']}]} ]} cli = DataFrameClient('host', 8086, 'username', 'password', 'db') with _mocked_session(cli, 'get', 200, json.dumps(data)): self.assertListEqual( cli.get_list_database(), [{'name': 'new_db_1'}, {'name': 'new_db_2'}] ) def test_datetime_to_epoch(self): timestamp = pd.Timestamp('2013-01-01 00:00:00.000+00:00') cli = DataFrameClient('host', 8086, 'username', 'password', 'db') self.assertEqual( cli._datetime_to_epoch(timestamp), 1356998400.0 ) self.assertEqual( cli._datetime_to_epoch(timestamp, time_precision='h'), 1356998400.0 / 3600 ) self.assertEqual( cli._datetime_to_epoch(timestamp, time_precision='m'), 1356998400.0 / 60 ) self.assertEqual( cli._datetime_to_epoch(timestamp, time_precision='s'), 1356998400.0 ) self.assertEqual( cli._datetime_to_epoch(timestamp, time_precision='ms'), 1356998400000.0 ) self.assertEqual( 
cli._datetime_to_epoch(timestamp, time_precision='u'), 1356998400000000.0 ) self.assertEqual( cli._datetime_to_epoch(timestamp, time_precision='n'), 1356998400000000000.0 ) influxdb-2.12.0/influxdb/tests/client_test.py0000644000175000017500000010010712652700251022042 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- """ unit tests for the InfluxDBClient. NB/WARNING : This module implements tests for the InfluxDBClient class but does so + without any server instance running + by mocking all the expected responses. So any change of (response format from) the server will **NOT** be detected by this module. See client_test_with_server.py for tests against a running server instance. """ import json import requests import requests.exceptions import socket import time import requests_mock import random from nose.tools import raises from mock import patch import warnings import mock import sys if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest from influxdb import InfluxDBClient, InfluxDBClusterClient from influxdb.client import InfluxDBServerError def _build_response_object(status_code=200, content=""): resp = requests.Response() resp.status_code = status_code resp._content = content.encode("utf8") return resp def _mocked_session(cli, method="GET", status_code=200, content=""): method = method.upper() def request(*args, **kwargs): c = content # Check method assert method == kwargs.get('method', 'GET') if method == 'POST': data = kwargs.get('data', None) if data is not None: # Data must be a string assert isinstance(data, str) # Data must be a JSON string assert c == json.loads(data, strict=True) c = data # Anyway, Content must be a JSON string (or empty string) if not isinstance(c, str): c = json.dumps(c) return _build_response_object(status_code=status_code, content=c) mocked = patch.object( cli._session, 'request', side_effect=request ) return mocked class TestInfluxDBClient(unittest.TestCase): def setUp(self): # By default, raise 
exceptions on warnings warnings.simplefilter('error', FutureWarning) self.cli = InfluxDBClient('localhost', 8086, 'username', 'password') self.dummy_points = [ { "measurement": "cpu_load_short", "tags": { "host": "server01", "region": "us-west" }, "time": "2009-11-10T23:00:00.123456Z", "fields": { "value": 0.64 } } ] self.dsn_string = 'influxdb://uSr:pWd@my.host.fr:1886/db' def test_scheme(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') self.assertEqual('http://host:8086', cli._baseurl) cli = InfluxDBClient( 'host', 8086, 'username', 'password', 'database', ssl=True ) self.assertEqual('https://host:8086', cli._baseurl) def test_dsn(self): cli = InfluxDBClient.from_DSN('influxdb://192.168.0.1:1886') self.assertEqual('http://192.168.0.1:1886', cli._baseurl) cli = InfluxDBClient.from_DSN(self.dsn_string) self.assertEqual('http://my.host.fr:1886', cli._baseurl) self.assertEqual('uSr', cli._username) self.assertEqual('pWd', cli._password) self.assertEqual('db', cli._database) self.assertFalse(cli.use_udp) cli = InfluxDBClient.from_DSN('udp+' + self.dsn_string) self.assertTrue(cli.use_udp) cli = InfluxDBClient.from_DSN('https+' + self.dsn_string) self.assertEqual('https://my.host.fr:1886', cli._baseurl) cli = InfluxDBClient.from_DSN('https+' + self.dsn_string, **{'ssl': False}) self.assertEqual('http://my.host.fr:1886', cli._baseurl) def test_switch_database(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') cli.switch_database('another_database') self.assertEqual('another_database', cli._database) def test_switch_user(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'database') cli.switch_user('another_username', 'another_password') self.assertEqual('another_username', cli._username) self.assertEqual('another_password', cli._password) def test_write(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, "http://localhost:8086/write", status_code=204 ) cli = 
InfluxDBClient(database='db') cli.write( {"database": "mydb", "retentionPolicy": "mypolicy", "points": [{"measurement": "cpu_load_short", "tags": {"host": "server01", "region": "us-west"}, "time": "2009-11-10T23:00:00Z", "fields": {"value": 0.64}}]} ) self.assertEqual( m.last_request.body, b"cpu_load_short,host=server01,region=us-west " b"value=0.64 1257894000000000000\n", ) def test_write_points(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, "http://localhost:8086/write", status_code=204 ) cli = InfluxDBClient(database='db') cli.write_points( self.dummy_points, ) self.assertEqual( 'cpu_load_short,host=server01,region=us-west ' 'value=0.64 1257894000123456000\n', m.last_request.body.decode('utf-8'), ) def test_write_points_toplevel_attributes(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, "http://localhost:8086/write", status_code=204 ) cli = InfluxDBClient(database='db') cli.write_points( self.dummy_points, database='testdb', tags={"tag": "hello"}, retention_policy="somepolicy" ) self.assertEqual( 'cpu_load_short,host=server01,region=us-west,tag=hello ' 'value=0.64 1257894000123456000\n', m.last_request.body.decode('utf-8'), ) def test_write_points_batch(self): dummy_points = [ {"measurement": "cpu_usage", "tags": {"unit": "percent"}, "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}}, {"measurement": "network", "tags": {"direction": "in"}, "time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}}, {"measurement": "network", "tags": {"direction": "out"}, "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}} ] expected_last_body = ( "network,direction=out,host=server01,region=us-west " "value=12.0 1257894000000000000\n" ) with requests_mock.Mocker() as m: m.register_uri(requests_mock.POST, "http://localhost:8086/write", status_code=204) cli = InfluxDBClient(database='db') cli.write_points(points=dummy_points, database='db', tags={"host": "server01", "region": "us-west"}, batch_size=2) 
self.assertEqual(m.call_count, 2) self.assertEqual(expected_last_body, m.last_request.body.decode('utf-8')) def test_write_points_udp(self): s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) port = random.randint(4000, 8000) s.bind(('0.0.0.0', port)) cli = InfluxDBClient( 'localhost', 8086, 'root', 'root', 'test', use_udp=True, udp_port=port ) cli.write_points(self.dummy_points) received_data, addr = s.recvfrom(1024) self.assertEqual( 'cpu_load_short,host=server01,region=us-west ' 'value=0.64 1257894000123456000\n', received_data.decode() ) def test_write_bad_precision_udp(self): cli = InfluxDBClient( 'localhost', 8086, 'root', 'root', 'test', use_udp=True, udp_port=4444 ) with self.assertRaisesRegexp( Exception, "InfluxDB only supports seconds precision for udp writes" ): cli.write_points( self.dummy_points, time_precision='ms' ) @raises(Exception) def test_write_points_fails(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') with _mocked_session(cli, 'post', 500): cli.write_points([]) def test_write_points_with_precision(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.POST, "http://localhost:8086/write", status_code=204 ) cli = InfluxDBClient(database='db') cli.write_points(self.dummy_points, time_precision='n') self.assertEqual( b'cpu_load_short,host=server01,region=us-west ' b'value=0.64 1257894000123456000\n', m.last_request.body, ) cli.write_points(self.dummy_points, time_precision='u') self.assertEqual( b'cpu_load_short,host=server01,region=us-west ' b'value=0.64 1257894000123456\n', m.last_request.body, ) cli.write_points(self.dummy_points, time_precision='ms') self.assertEqual( b'cpu_load_short,host=server01,region=us-west ' b'value=0.64 1257894000123\n', m.last_request.body, ) cli.write_points(self.dummy_points, time_precision='s') self.assertEqual( b"cpu_load_short,host=server01,region=us-west " b"value=0.64 1257894000\n", m.last_request.body, ) cli.write_points(self.dummy_points, time_precision='m') 
self.assertEqual( b'cpu_load_short,host=server01,region=us-west ' b'value=0.64 20964900\n', m.last_request.body, ) cli.write_points(self.dummy_points, time_precision='h') self.assertEqual( b'cpu_load_short,host=server01,region=us-west ' b'value=0.64 349415\n', m.last_request.body, ) def test_write_points_bad_precision(self): cli = InfluxDBClient() with self.assertRaisesRegexp( Exception, "Invalid time precision is given. " "\(use 'n', 'u', 'ms', 's', 'm' or 'h'\)" ): cli.write_points( self.dummy_points, time_precision='g' ) @raises(Exception) def test_write_points_with_precision_fails(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') with _mocked_session(cli, 'post', 500): cli.write_points_with_precision([]) def test_query(self): example_response = ( '{"results": [{"series": [{"measurement": "sdfsdfsdf", ' '"columns": ["time", "value"], "values": ' '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": ' '[{"measurement": "cpu_load_short", "columns": ["time", "value"], ' '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}' ) with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, "http://localhost:8086/query", text=example_response ) rs = self.cli.query('select * from foo') self.assertListEqual( list(rs[0].get_points()), [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}] ) @unittest.skip('Not implemented for 0.9') def test_query_chunked(self): cli = InfluxDBClient(database='db') example_object = { 'points': [ [1415206250119, 40001, 667], [1415206244555, 30001, 7], [1415206228241, 20001, 788], [1415206212980, 10001, 555], [1415197271586, 10001, 23] ], 'measurement': 'foo', 'columns': [ 'time', 'sequence_number', 'val' ] } example_response = \ json.dumps(example_object) + json.dumps(example_object) with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, "http://localhost:8086/db/db/series", text=example_response ) self.assertListEqual( cli.query('select * from foo', chunked=True), [example_object, example_object] ) 
@raises(Exception) def test_query_fail(self): with _mocked_session(self.cli, 'get', 401): self.cli.query('select column_one from foo;') def test_create_database(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, "http://localhost:8086/query", text='{"results":[{}]}' ) self.cli.create_database('new_db') self.assertEqual( m.last_request.qs['q'][0], 'create database "new_db"' ) def test_create_database_with_exist_check(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, "http://localhost:8086/query", text='{"results":[{}]}' ) self.cli.create_database('new_db', if_not_exists=True) self.assertEqual( m.last_request.qs['q'][0], 'create database if not exists "new_db"' ) def test_create_numeric_named_database(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, "http://localhost:8086/query", text='{"results":[{}]}' ) self.cli.create_database('123') self.assertEqual( m.last_request.qs['q'][0], 'create database "123"' ) @raises(Exception) def test_create_database_fails(self): with _mocked_session(self.cli, 'post', 401): self.cli.create_database('new_db') def test_drop_database(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, "http://localhost:8086/query", text='{"results":[{}]}' ) self.cli.drop_database('new_db') self.assertEqual( m.last_request.qs['q'][0], 'drop database "new_db"' ) def test_drop_numeric_named_database(self): with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, "http://localhost:8086/query", text='{"results":[{}]}' ) self.cli.drop_database('123') self.assertEqual( m.last_request.qs['q'][0], 'drop database "123"' ) @raises(Exception) def test_drop_database_fails(self): cli = InfluxDBClient('host', 8086, 'username', 'password', 'db') with _mocked_session(cli, 'delete', 401): cli.drop_database('old_db') def test_get_list_database(self): data = {'results': [ {'series': [ {'name': 'databases', 'values': [ ['new_db_1'], ['new_db_2']], 'columns': 
['name']}]} ]} with _mocked_session(self.cli, 'get', 200, json.dumps(data)): self.assertListEqual( self.cli.get_list_database(), [{'name': 'new_db_1'}, {'name': 'new_db_2'}] ) @raises(Exception) def test_get_list_database_fails(self): cli = InfluxDBClient('host', 8086, 'username', 'password') with _mocked_session(cli, 'get', 401): cli.get_list_database() def test_get_list_servers(self): data = {'results': [ {'series': [ {'columns': ['id', 'cluster_addr', 'raft', 'raft-leader'], 'values': [ [1, 'server01:8088', True, True], [2, 'server02:8088', True, False], [3, 'server03:8088', True, False]]}]} ]} with _mocked_session(self.cli, 'get', 200, json.dumps(data)): self.assertListEqual( self.cli.get_list_servers(), [{'cluster_addr': 'server01:8088', 'id': 1, 'raft': True, 'raft-leader': True}, {'cluster_addr': 'server02:8088', 'id': 2, 'raft': True, 'raft-leader': False}, {'cluster_addr': 'server03:8088', 'id': 3, 'raft': True, 'raft-leader': False}] ) @raises(Exception) def test_get_list_servers_fails(self): cli = InfluxDBClient('host', 8086, 'username', 'password') with _mocked_session(cli, 'get', 401): cli.get_list_servers() def test_get_list_series(self): example_response = \ '{"results": [{"series": [{"name": "cpu_load_short", "columns": ' \ '["_id", "host", "region"], "values": ' \ '[[1, "server01", "us-west"]]}]}]}' with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, "http://localhost:8086/query", text=example_response ) self.assertListEqual( self.cli.get_list_series(), [{'name': 'cpu_load_short', 'tags': [ {'host': 'server01', '_id': 1, 'region': 'us-west'} ]}] ) def test_create_retention_policy_default(self): example_response = '{"results":[{}]}' with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, "http://localhost:8086/query", text=example_response ) self.cli.create_retention_policy( 'somename', '1d', 4, default=True, database='db' ) self.assertEqual( m.last_request.qs['q'][0], 'create retention policy somename on ' 'db duration 
1d replication 4 default' ) def test_create_retention_policy(self): example_response = '{"results":[{}]}' with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, "http://localhost:8086/query", text=example_response ) self.cli.create_retention_policy( 'somename', '1d', 4, database='db' ) self.assertEqual( m.last_request.qs['q'][0], 'create retention policy somename on ' 'db duration 1d replication 4' ) def test_alter_retention_policy(self): example_response = '{"results":[{}]}' with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, "http://localhost:8086/query", text=example_response ) # Test alter duration self.cli.alter_retention_policy('somename', 'db', duration='4d') self.assertEqual( m.last_request.qs['q'][0], 'alter retention policy somename on db duration 4d' ) # Test alter replication self.cli.alter_retention_policy('somename', 'db', replication=4) self.assertEqual( m.last_request.qs['q'][0], 'alter retention policy somename on db replication 4' ) # Test alter default self.cli.alter_retention_policy('somename', 'db', default=True) self.assertEqual( m.last_request.qs['q'][0], 'alter retention policy somename on db default' ) @raises(Exception) def test_alter_retention_policy_invalid(self): cli = InfluxDBClient('host', 8086, 'username', 'password') with _mocked_session(cli, 'get', 400): self.cli.alter_retention_policy('somename', 'db') def test_get_list_retention_policies(self): example_response = \ '{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'\ ' "columns": ["name", "duration", "replicaN"]}]}]}' with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, "http://localhost:8086/query", text=example_response ) self.assertListEqual( self.cli.get_list_retention_policies(), [{'duration': '24h0m0s', 'name': 'fsfdsdf', 'replicaN': 2}] ) @mock.patch('requests.Session.request') def test_request_retry(self, mock_request): """Tests that two connection errors will be handled""" class CustomMock(object): i = 0 def 
connection_error(self, *args, **kwargs): self.i += 1 if self.i < 3: raise requests.exceptions.ConnectionError else: r = requests.Response() r.status_code = 204 return r mock_request.side_effect = CustomMock().connection_error cli = InfluxDBClient(database='db') cli.write_points( self.dummy_points ) @mock.patch('requests.Session.request') def test_request_retry_raises(self, mock_request): """Tests that three connection errors will not be handled""" class CustomMock(object): i = 0 def connection_error(self, *args, **kwargs): self.i += 1 if self.i < 4: raise requests.exceptions.ConnectionError else: r = requests.Response() r.status_code = 200 return r mock_request.side_effect = CustomMock().connection_error cli = InfluxDBClient(database='db') with self.assertRaises(requests.exceptions.ConnectionError): cli.write_points(self.dummy_points) def test_get_list_users(self): example_response = ( '{"results":[{"series":[{"columns":["user","admin"],' '"values":[["test",false]]}]}]}' ) with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, "http://localhost:8086/query", text=example_response ) self.assertListEqual( self.cli.get_list_users(), [{'user': 'test', 'admin': False}] ) def test_get_list_users_empty(self): example_response = ( '{"results":[{"series":[{"columns":["user","admin"]}]}]}' ) with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, "http://localhost:8086/query", text=example_response ) self.assertListEqual(self.cli.get_list_users(), []) def test_revoke_admin_privileges(self): example_response = '{"results":[{}]}' with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, "http://localhost:8086/query", text=example_response ) self.cli.revoke_admin_privileges('test') self.assertEqual( m.last_request.qs['q'][0], 'revoke all privileges from test' ) @raises(Exception) def test_revoke_admin_privileges_invalid(self): cli = InfluxDBClient('host', 8086, 'username', 'password') with _mocked_session(cli, 'get', 400): 
self.cli.revoke_admin_privileges('') def test_grant_privilege(self): example_response = '{"results":[{}]}' with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, "http://localhost:8086/query", text=example_response ) self.cli.grant_privilege('read', 'testdb', 'test') self.assertEqual( m.last_request.qs['q'][0], 'grant read on testdb to test' ) @raises(Exception) def test_grant_privilege_invalid(self): cli = InfluxDBClient('host', 8086, 'username', 'password') with _mocked_session(cli, 'get', 400): self.cli.grant_privilege('', 'testdb', 'test') def test_revoke_privilege(self): example_response = '{"results":[{}]}' with requests_mock.Mocker() as m: m.register_uri( requests_mock.GET, "http://localhost:8086/query", text=example_response ) self.cli.revoke_privilege('read', 'testdb', 'test') self.assertEqual( m.last_request.qs['q'][0], 'revoke read on testdb from test' ) @raises(Exception) def test_revoke_privilege_invalid(self): cli = InfluxDBClient('host', 8086, 'username', 'password') with _mocked_session(cli, 'get', 400): self.cli.revoke_privilege('', 'testdb', 'test') class FakeClient(InfluxDBClient): def __init__(self, *args, **kwargs): super(FakeClient, self).__init__(*args, **kwargs) def query(self, query, params={}, expected_response_code=200, database=None): if query == 'Fail': raise Exception("Fail") elif query == 'Fail once' and self._host == 'host1': raise Exception("Fail Once") elif query == 'Fail twice' and self._host in 'host1 host2': raise Exception("Fail Twice") else: return "Success" class TestInfluxDBClusterClient(unittest.TestCase): def setUp(self): # By default, raise exceptions on warnings warnings.simplefilter('error', FutureWarning) self.hosts = [('host1', 8086), ('host2', 8086), ('host3', 8086)] self.dsn_string = 'influxdb://uSr:pWd@host1:8086,uSr:pWd@host2:8086/db' def test_init(self): cluster = InfluxDBClusterClient(hosts=self.hosts, username='username', password='password', database='database', shuffle=False, 
client_base_class=FakeClient) self.assertEqual(3, len(cluster.hosts)) self.assertEqual(0, len(cluster.bad_hosts)) self.assertIn((cluster._client._host, cluster._client._port), cluster.hosts) def test_one_server_fails(self): cluster = InfluxDBClusterClient(hosts=self.hosts, database='database', shuffle=False, client_base_class=FakeClient) self.assertEqual('Success', cluster.query('Fail once')) self.assertEqual(2, len(cluster.hosts)) self.assertEqual(1, len(cluster.bad_hosts)) def test_two_servers_fail(self): cluster = InfluxDBClusterClient(hosts=self.hosts, database='database', shuffle=False, client_base_class=FakeClient) self.assertEqual('Success', cluster.query('Fail twice')) self.assertEqual(1, len(cluster.hosts)) self.assertEqual(2, len(cluster.bad_hosts)) def test_all_fail(self): cluster = InfluxDBClusterClient(hosts=self.hosts, database='database', shuffle=True, client_base_class=FakeClient) with self.assertRaises(InfluxDBServerError): cluster.query('Fail') self.assertEqual(0, len(cluster.hosts)) self.assertEqual(3, len(cluster.bad_hosts)) def test_all_good(self): cluster = InfluxDBClusterClient(hosts=self.hosts, database='database', shuffle=True, client_base_class=FakeClient) self.assertEqual('Success', cluster.query('')) self.assertEqual(3, len(cluster.hosts)) self.assertEqual(0, len(cluster.bad_hosts)) def test_recovery(self): cluster = InfluxDBClusterClient(hosts=self.hosts, database='database', shuffle=True, client_base_class=FakeClient) with self.assertRaises(InfluxDBServerError): cluster.query('Fail') self.assertEqual('Success', cluster.query('')) self.assertEqual(1, len(cluster.hosts)) self.assertEqual(2, len(cluster.bad_hosts)) def test_healing(self): cluster = InfluxDBClusterClient(hosts=self.hosts, database='database', shuffle=True, healing_delay=1, client_base_class=FakeClient) with self.assertRaises(InfluxDBServerError): cluster.query('Fail') self.assertEqual('Success', cluster.query('')) time.sleep(1.1) self.assertEqual('Success', 
cluster.query('')) self.assertEqual(2, len(cluster.hosts)) self.assertEqual(1, len(cluster.bad_hosts)) time.sleep(1.1) self.assertEqual('Success', cluster.query('')) self.assertEqual(3, len(cluster.hosts)) self.assertEqual(0, len(cluster.bad_hosts)) def test_dsn(self): cli = InfluxDBClusterClient.from_DSN(self.dsn_string) self.assertEqual([('host1', 8086), ('host2', 8086)], cli.hosts) self.assertEqual('http://host1:8086', cli._client._baseurl) self.assertEqual('uSr', cli._client._username) self.assertEqual('pWd', cli._client._password) self.assertEqual('db', cli._client._database) self.assertFalse(cli._client.use_udp) cli = InfluxDBClusterClient.from_DSN('udp+' + self.dsn_string) self.assertTrue(cli._client.use_udp) cli = InfluxDBClusterClient.from_DSN('https+' + self.dsn_string) self.assertEqual('https://host1:8086', cli._client._baseurl) cli = InfluxDBClusterClient.from_DSN('https+' + self.dsn_string, **{'ssl': False}) self.assertEqual('http://host1:8086', cli._client._baseurl) def test_dsn_password_caps(self): cli = InfluxDBClusterClient.from_DSN( 'https+influxdb://usr:pWd@host:8086/db') self.assertEqual('pWd', cli._client._password) def test_dsn_mixed_scheme_case(self): cli = InfluxDBClusterClient.from_DSN( 'hTTps+inFLUxdb://usr:pWd@host:8086/db') self.assertEqual('pWd', cli._client._password) self.assertEqual('https://host:8086', cli._client._baseurl) cli = InfluxDBClusterClient.from_DSN( 'uDP+influxdb://usr:pwd@host1:8086,usr:pwd@host2:8086/db') self.assertTrue(cli._client.use_udp) influxdb-2.12.0/influxdb/tests/helper_test.py0000644000175000017500000002271012652700251022046 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- import sys if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest import warnings import mock from influxdb import SeriesHelper, InfluxDBClient from requests.exceptions import ConnectionError class TestSeriesHelper(unittest.TestCase): @classmethod def setUpClass(cls): super(TestSeriesHelper, 
cls).setUpClass() TestSeriesHelper.client = InfluxDBClient( 'host', 8086, 'username', 'password', 'database' ) class MySeriesHelper(SeriesHelper): class Meta: client = TestSeriesHelper.client series_name = 'events.stats.{server_name}' fields = ['some_stat'] tags = ['server_name', 'other_tag'] bulk_size = 5 autocommit = True TestSeriesHelper.MySeriesHelper = MySeriesHelper def test_auto_commit(self): """ Tests that write_points is called after the right number of events """ class AutoCommitTest(SeriesHelper): class Meta: series_name = 'events.stats.{server_name}' fields = ['some_stat'] tags = ['server_name', 'other_tag'] bulk_size = 5 client = InfluxDBClient() autocommit = True fake_write_points = mock.MagicMock() AutoCommitTest(server_name='us.east-1', some_stat=159, other_tag='gg') AutoCommitTest._client.write_points = fake_write_points AutoCommitTest(server_name='us.east-1', some_stat=158, other_tag='gg') AutoCommitTest(server_name='us.east-1', some_stat=157, other_tag='gg') AutoCommitTest(server_name='us.east-1', some_stat=156, other_tag='gg') self.assertFalse(fake_write_points.called) AutoCommitTest(server_name='us.east-1', some_stat=3443, other_tag='gg') self.assertTrue(fake_write_points.called) def testSingleSeriesName(self): """ Tests JSON conversion when there is only one series name. 
""" TestSeriesHelper.MySeriesHelper( server_name='us.east-1', other_tag='ello', some_stat=159) TestSeriesHelper.MySeriesHelper( server_name='us.east-1', other_tag='ello', some_stat=158) TestSeriesHelper.MySeriesHelper( server_name='us.east-1', other_tag='ello', some_stat=157) TestSeriesHelper.MySeriesHelper( server_name='us.east-1', other_tag='ello', some_stat=156) expectation = [ { "measurement": "events.stats.us.east-1", "tags": { "other_tag": "ello", "server_name": "us.east-1" }, "fields": { "some_stat": 159 }, }, { "measurement": "events.stats.us.east-1", "tags": { "other_tag": "ello", "server_name": "us.east-1" }, "fields": { "some_stat": 158 }, }, { "measurement": "events.stats.us.east-1", "tags": { "other_tag": "ello", "server_name": "us.east-1" }, "fields": { "some_stat": 157 }, }, { "measurement": "events.stats.us.east-1", "tags": { "other_tag": "ello", "server_name": "us.east-1" }, "fields": { "some_stat": 156 }, } ] rcvd = TestSeriesHelper.MySeriesHelper._json_body_() self.assertTrue(all([el in expectation for el in rcvd]) and all([el in rcvd for el in expectation]), 'Invalid JSON body of time series returned from ' '_json_body_ for one series name: {0}.'.format(rcvd)) TestSeriesHelper.MySeriesHelper._reset_() self.assertEqual( TestSeriesHelper.MySeriesHelper._json_body_(), [], 'Resetting helper did not empty datapoints.') def testSeveralSeriesNames(self): ''' Tests JSON conversion when there is only one series name. 
''' TestSeriesHelper.MySeriesHelper( server_name='us.east-1', some_stat=159, other_tag='ello') TestSeriesHelper.MySeriesHelper( server_name='fr.paris-10', some_stat=158, other_tag='ello') TestSeriesHelper.MySeriesHelper( server_name='lu.lux', some_stat=157, other_tag='ello') TestSeriesHelper.MySeriesHelper( server_name='uk.london', some_stat=156, other_tag='ello') expectation = [ { 'fields': { 'some_stat': 157 }, 'measurement': 'events.stats.lu.lux', 'tags': { 'other_tag': 'ello', 'server_name': 'lu.lux' } }, { 'fields': { 'some_stat': 156 }, 'measurement': 'events.stats.uk.london', 'tags': { 'other_tag': 'ello', 'server_name': 'uk.london' } }, { 'fields': { 'some_stat': 158 }, 'measurement': 'events.stats.fr.paris-10', 'tags': { 'other_tag': 'ello', 'server_name': 'fr.paris-10' } }, { 'fields': { 'some_stat': 159 }, 'measurement': 'events.stats.us.east-1', 'tags': { 'other_tag': 'ello', 'server_name': 'us.east-1' } } ] rcvd = TestSeriesHelper.MySeriesHelper._json_body_() self.assertTrue(all([el in expectation for el in rcvd]) and all([el in rcvd for el in expectation]), 'Invalid JSON body of time series returned from ' '_json_body_ for several series names: {0}.' .format(rcvd)) TestSeriesHelper.MySeriesHelper._reset_() self.assertEqual( TestSeriesHelper.MySeriesHelper._json_body_(), [], 'Resetting helper did not empty datapoints.') def testInvalidHelpers(self): ''' Tests errors in invalid helpers. 
''' class MissingMeta(SeriesHelper): pass class MissingClient(SeriesHelper): class Meta: series_name = 'events.stats.{server_name}' fields = ['time', 'server_name'] autocommit = True class MissingSeriesName(SeriesHelper): class Meta: fields = ['time', 'server_name'] class MissingFields(SeriesHelper): class Meta: series_name = 'events.stats.{server_name}' for cls in [MissingMeta, MissingClient, MissingFields, MissingSeriesName]: self.assertRaises( AttributeError, cls, **{'time': 159, 'server_name': 'us.east-1'}) @unittest.skip("Fails on py32") def testWarnBulkSizeZero(self): """ Tests warning for an invalid bulk size. """ class WarnBulkSizeZero(SeriesHelper): class Meta: client = TestSeriesHelper.client series_name = 'events.stats.{server_name}' fields = ['time', 'server_name'] tags = [] bulk_size = 0 autocommit = True with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") try: WarnBulkSizeZero(time=159, server_name='us.east-1') except ConnectionError: # Server defined in the client is invalid, we're testing # the warning only. pass self.assertEqual(len(w), 1, '{0} call should have generated one warning.' .format(WarnBulkSizeZero)) self.assertIn('forced to 1', str(w[-1].message), 'Warning message did not contain "forced to 1".') def testWarnBulkSizeNoEffect(self): """ Tests warning for a set bulk size but autocommit False. """ class WarnBulkSizeNoEffect(SeriesHelper): class Meta: series_name = 'events.stats.{server_name}' fields = ['time', 'server_name'] bulk_size = 5 tags = [] autocommit = False with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") WarnBulkSizeNoEffect(time=159, server_name='us.east-1') self.assertEqual(len(w), 1, '{0} call should have generated one warning.' 
    def __init__(self, ignore_nan=True, *args, **kwargs):
        """
        Initialise the DataFrame client; remaining arguments are passed
        straight to the InfluxDBClient constructor.

        :param ignore_nan: if True, NaN entries are replaced with None
            when point arrays are converted (see _convert_array).
        """
        super(DataFrameClient, self).__init__(*args, **kwargs)
        try:
            # pandas is imported lazily so the plain client works without it.
            global pd
            import pandas as pd
        except ImportError as ex:
            raise ImportError('DataFrameClient requires Pandas, '
                              '"{ex}" problem importing'.format(ex=str(ex)))

        # Reference point used by _datetime_to_epoch for time conversion.
        self.EPOCH = pd.Timestamp('1970-01-01 00:00:00.000+00:00')
        self.ignore_nan = ignore_nan
    def write_points(self, data, *args, **kwargs):
        """
        Write to multiple time series names.

        :param data: A dictionary mapping series names to pandas DataFrames
        :param time_precision: [Optional, default 's'] Either 's', 'm',
            'ms' or 'u'.
        :param batch_size: [Optional] Value to write the points in batches
            instead of all at one time. Useful for when doing data dumps
            from one database to another or when doing a massive write
            operation
        :type batch_size: int
        """
        batch_size = kwargs.get('batch_size')
        time_precision = kwargs.get('time_precision', 's')
        if batch_size:
            kwargs.pop('batch_size')  # don't hand over to InfluxDBClient
            for key, data_frame in data.items():
                # Number of slices needed to cover the whole frame.
                number_batches = int(math.ceil(
                    len(data_frame) / float(batch_size)))
                for batch in range(number_batches):
                    start_index = batch * batch_size
                    end_index = (batch + 1) * batch_size
                    # NOTE(review): DataFrame.ix is deprecated/removed in
                    # modern pandas; this targets the pandas API current
                    # when this legacy 0.8.x client was written.
                    data = [self._convert_dataframe_to_json(
                        name=key,
                        dataframe=data_frame.ix[start_index:end_index].copy(),
                        time_precision=time_precision)]
                    InfluxDBClient.write_points(self, data, *args, **kwargs)
            return True
        else:
            data = [self._convert_dataframe_to_json(
                name=key, dataframe=dataframe,
                time_precision=time_precision)
                for key, dataframe in data.items()]
            return InfluxDBClient.write_points(self, data, *args, **kwargs)
""" result = InfluxDBClient.query(self, query=query, time_precision=time_precision, chunked=chunked) if len(result) == 0: return result elif len(result) == 1: return self._to_dataframe(result[0], time_precision) else: ret = {} for time_series in result: ret[time_series['name']] = self._to_dataframe(time_series, time_precision) return ret def _to_dataframe(self, json_result, time_precision): dataframe = pd.DataFrame(data=json_result['points'], columns=json_result['columns']) if 'sequence_number' in dataframe.keys(): dataframe.sort_values(['time', 'sequence_number'], inplace=True) else: dataframe.sort_values(['time'], inplace=True) pandas_time_unit = time_precision if time_precision == 'm': pandas_time_unit = 'ms' elif time_precision == 'u': pandas_time_unit = 'us' dataframe.index = pd.to_datetime(list(dataframe['time']), unit=pandas_time_unit, utc=True) del dataframe['time'] return dataframe def _convert_dataframe_to_json(self, dataframe, name, time_precision='s'): if not isinstance(dataframe, pd.DataFrame): raise TypeError('Must be DataFrame, but type was: {0}.' 
.format(type(dataframe))) if not (isinstance(dataframe.index, pd.tseries.period.PeriodIndex) or isinstance(dataframe.index, pd.tseries.index.DatetimeIndex)): raise TypeError('Must be DataFrame with DatetimeIndex or \ PeriodIndex.') dataframe.index = dataframe.index.to_datetime() if dataframe.index.tzinfo is None: dataframe.index = dataframe.index.tz_localize('UTC') dataframe['time'] = [self._datetime_to_epoch(dt, time_precision) for dt in dataframe.index] data = {'name': name, 'columns': [str(column) for column in dataframe.columns], 'points': [self._convert_array(x) for x in dataframe.values]} return data def _convert_array(self, array): try: global np import numpy as np except ImportError as ex: raise ImportError('DataFrameClient requires Numpy, ' '"{ex}" problem importing'.format(ex=str(ex))) if self.ignore_nan: number_types = (int, float, np.number) condition = (all(isinstance(el, number_types) for el in array) and np.isnan(array)) return list(np.where(condition, None, array)) else: return list(array) def _datetime_to_epoch(self, datetime, time_precision='s'): seconds = (datetime - self.EPOCH).total_seconds() if time_precision == 's': return seconds elif time_precision == 'm' or time_precision == 'ms': return seconds * 1000 elif time_precision == 'u': return seconds * 1000000 influxdb-2.12.0/influxdb/influxdb08/helper.py0000644000175000017500000001230212652700251021624 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- """ Helper class for InfluxDB """ from collections import namedtuple, defaultdict from warnings import warn import six class SeriesHelper(object): """ Subclassing this helper eases writing data points in bulk. All data points are immutable, insuring they do not get overwritten. Each subclass can write to its own database. The time series names can also be based on one or more defined fields. Annotated example:: class MySeriesHelper(SeriesHelper): class Meta: # Meta class stores time series helper configuration. 
    def __new__(cls, *args, **kwargs):
        """
        Initializes class attributes for subsequent constructor calls.

        :note: *args and **kwargs are not explicitly used in this function,
        but needed for Python 2 compatibility.
        """
        # Class-level configuration is pulled from Meta exactly once,
        # on the first instantiation of each subclass.
        if not cls.__initialized__:
            cls.__initialized__ = True
            try:
                _meta = getattr(cls, 'Meta')
            except AttributeError:
                raise AttributeError(
                    'Missing Meta class in {0}.'.format(
                        cls.__name__))

            # series_name and fields are mandatory; copy them to
            # underscore-prefixed class attributes.
            for attr in ['series_name', 'fields']:
                try:
                    setattr(cls, '_' + attr, getattr(_meta, attr))
                except AttributeError:
                    raise AttributeError(
                        'Missing {0} in {1} Meta class.'.format(
                            attr,
                            cls.__name__))

            cls._autocommit = getattr(_meta, 'autocommit', False)

            # A client is only required when autocommit is enabled.
            cls._client = getattr(_meta, 'client', None)
            if cls._autocommit and not cls._client:
                raise AttributeError(
                    'In {0}, autocommit is set to True, but no client is set.'
                    .format(cls.__name__))

            try:
                cls._bulk_size = getattr(_meta, 'bulk_size')
                if cls._bulk_size < 1 and cls._autocommit:
                    warn(
                        'Definition of bulk_size in {0} forced to 1, '
                        'was less than 1.'.format(cls.__name__))
                    cls._bulk_size = 1
            except AttributeError:
                # No bulk_size declared: -1 disables the threshold check.
                cls._bulk_size = -1
            else:
                if not cls._autocommit:
                    # NOTE(review): "no affect" is a typo for "no effect",
                    # kept as-is because the test suite asserts this
                    # exact wording.
                    warn(
                        'Definition of bulk_size in {0} has no affect because'
                        ' autocommit is false.'.format(cls.__name__))

            # Per-series buffers and the immutable point record type.
            cls._datapoints = defaultdict(list)
            cls._type = namedtuple(cls.__name__, cls._fields)

        return super(SeriesHelper, cls).__new__(cls)

    def __init__(self, **kw):
        """
        Constructor call creates a new data point. All fields must be present.

        :note: Data points written when `bulk_size` is reached per Helper.
        :warning: Data points are *immutable* (`namedtuples`).
        """
        cls = self.__class__

        # Every declared field must be supplied -- no more, no fewer.
        if sorted(cls._fields) != sorted(kw.keys()):
            raise NameError(
                'Expected {0}, got {1}.'.format(
                    cls._fields,
                    kw.keys()))

        # Buffer the point under its fully formatted series name.
        cls._datapoints[cls._series_name.format(**kw)].append(cls._type(**kw))

        # Autocommit flushes once the total buffered count (across all
        # series) reaches the bulk size.
        if cls._autocommit and \
                sum(len(series) for series in cls._datapoints.values()) \
                >= cls._bulk_size:
            cls.commit()

    @classmethod
    def commit(cls, client=None):
        """
        Commit everything from datapoints via the client.

        :param client: InfluxDBClient instance for writing points to InfluxDB.
        :attention: any provided client will supersede the class client.
        :return: result of client.write_points.
        """
        if not client:
            client = cls._client
        rtn = client.write_points(cls._json_body_())
        # Buffers are cleared only after a successful write call.
        cls._reset_()
        return rtn

    @classmethod
    def _json_body_(cls):
        """
        :return: JSON body of these datapoints.
        """
        json = []
        for series_name, data in six.iteritems(cls._datapoints):
            json.append({'name': series_name,
                         'columns': cls._fields,
                         'points': [[getattr(point, k) for k in cls._fields]
                                    for point in data]
                         })
        return json

    @classmethod
    def _reset_(cls):
        """
        Reset data storage.
        """
        cls._datapoints = defaultdict(list)
""" cls._datapoints = defaultdict(list) influxdb-2.12.0/influxdb/influxdb08/chunked_json.py0000644000175000017500000000070612652700251023024 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- # # Author: Adrian Sampson # Source: https://gist.github.com/sampsyo/920215 # import json _decoder = json.JSONDecoder() def loads(s): """A generator reading a sequence of JSON values from a string.""" while s: s = s.strip() obj, pos = _decoder.raw_decode(s) if not pos: raise ValueError('no JSON object found at %i' % pos) yield obj s = s[pos:] influxdb-2.12.0/influxdb/influxdb08/client.py0000644000175000017500000006320012652700251021626 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- """ Python client for InfluxDB """ import json import socket import requests import requests.exceptions import warnings from sys import version_info from influxdb import chunked_json try: xrange except NameError: xrange = range if version_info[0] == 3: from urllib.parse import urlparse else: from urlparse import urlparse session = requests.Session() class InfluxDBClientError(Exception): """Raised when an error occurs in the request""" def __init__(self, content, code=-1): super(InfluxDBClientError, self).__init__( "{0}: {1}".format(code, content)) self.content = content self.code = code class InfluxDBClient(object): """ The ``InfluxDBClient`` object holds information necessary to connect to InfluxDB. Requests can be made to InfluxDB directly through the client. 
    def __init__(self,
                 host='localhost',
                 port=8086,
                 username='root',
                 password='root',
                 database=None,
                 ssl=False,
                 verify_ssl=False,
                 timeout=None,
                 use_udp=False,
                 udp_port=4444):
        """
        Construct a new InfluxDBClient object.

        :param host: hostname to connect to, defaults to 'localhost'
        :param port: HTTP port, defaults to 8086
        :param username: user to connect as, defaults to 'root'
        :param password: password of the user, defaults to 'root'
        :param database: database name to use, defaults to None
        :param ssl: use https instead of http, defaults to False
        :param verify_ssl: verify SSL certificates, defaults to False
        :param timeout: seconds to wait when establishing a connection
        :param use_udp: send writes over UDP instead of HTTP
        :param udp_port: UDP port to use, defaults to 4444
        """
        self._host = host
        self._port = port
        self._username = username
        self._password = password
        self._database = database
        self._timeout = timeout

        self._verify_ssl = verify_ssl

        self.use_udp = use_udp
        self.udp_port = udp_port
        if use_udp:
            # The UDP socket is created once and reused by send_packet().
            self.udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

        self._scheme = "http"

        if ssl is True:
            self._scheme = "https"

        self._baseurl = "{0}://{1}:{2}".format(
            self._scheme,
            self._host,
            self._port)

        self._headers = {
            'Content-type': 'application/json',
            'Accept': 'text/plain'}
    @staticmethod
    def from_DSN(dsn, **kwargs):
        """
        Return an instance of InfluxDBClient built from a data source name.

        Supported schemes are "influxdb", "https+influxdb" and
        "udp+influxdb". Keyword arguments are forwarded to the
        InfluxDBClient constructor and override values parsed from the DSN.

        :param dsn: data source name, e.g.
            ``influxdb://user:pass@localhost:8086/dbname``
        :type dsn: string
        :param **kwargs: additional parameters for InfluxDBClient.
        :type **kwargs: dict
        :note: when using "udp+influxdb" the specified port (if any) will
            be used for the TCP connection; specify the UDP port with the
            additional ``udp_port`` parameter.
        :raise ValueError: if the provided DSN has any unexpected value.
        """
        init_args = {}
        conn_params = urlparse(dsn)
        scheme_info = conn_params.scheme.split('+')

        # The scheme is either plain "influxdb" or "<modifier>+influxdb".
        if len(scheme_info) == 1:
            scheme = scheme_info[0]
            modifier = None
        else:
            modifier, scheme = scheme_info

        if scheme != 'influxdb':
            raise ValueError('Unknown scheme "{0}".'.format(scheme))
        if modifier:
            if modifier == 'udp':
                init_args['use_udp'] = True
            elif modifier == 'https':
                init_args['ssl'] = True
            else:
                raise ValueError('Unknown modifier "{0}".'.format(modifier))

        # Only set what the DSN actually provides, so constructor
        # defaults still apply for anything omitted.
        if conn_params.hostname:
            init_args['host'] = conn_params.hostname
        if conn_params.port:
            init_args['port'] = conn_params.port
        if conn_params.username:
            init_args['username'] = conn_params.username
        if conn_params.password:
            init_args['password'] = conn_params.password
        # Path looks like "/dbname": strip the leading slash.
        if conn_params.path and len(conn_params.path) > 1:
            init_args['database'] = conn_params.path[1:]

        # Explicit keyword arguments win over DSN-derived values.
        init_args.update(kwargs)

        return InfluxDBClient(**init_args)
""" warnings.warn( "switch_db is deprecated, and will be removed " "in future versions. Please use " "``InfluxDBClient.switch_database(database)`` instead.", FutureWarning) return self.switch_database(database) def switch_user(self, username, password): """ switch_user() Change client username. :param username: the new username to switch to :type username: string :param password: the new password to switch to :type password: string """ self._username = username self._password = password def request(self, url, method='GET', params=None, data=None, expected_response_code=200): """ Make a http request to API """ url = "{0}/{1}".format(self._baseurl, url) if params is None: params = {} auth = { 'u': self._username, 'p': self._password } params.update(auth) if data is not None and not isinstance(data, str): data = json.dumps(data) # Try to send the request a maximum of three times. (see #103) # TODO (aviau): Make this configurable. for i in range(0, 3): try: response = session.request( method=method, url=url, params=params, data=data, headers=self._headers, verify=self._verify_ssl, timeout=self._timeout ) break except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e: if i < 2: continue else: raise e if response.status_code == expected_response_code: return response else: raise InfluxDBClientError(response.content, response.status_code) def write(self, data): """ Provided as convenience for influxdb v0.9.0, this may change. """ self.request( url="write", method='POST', params=None, data=data, expected_response_code=200 ) return True # Writing Data # # Assuming you have a database named foo_production you can write data # by doing a POST to /db/foo_production/series?u=some_user&p=some_password # with a JSON body of points. def write_points(self, data, time_precision='s', *args, **kwargs): """ Write to multiple time series names. 
    def write_points(self, data, time_precision='s', *args, **kwargs):
        """
        Write to multiple time series names.

        An example data blob is:

        data = [
            {
                "points": [
                    [
                        12
                    ]
                ],
                "name": "cpu_load_short",
                "columns": [
                    "value"
                ]
            }
        ]

        :param data: A list of dicts in InfluxDB 0.8.x data format.
        :param time_precision: [Optional, default 's'] Either 's', 'm',
            'ms' or 'u'.
        :param batch_size: [Optional] Value to write the points in batches
            instead of all at one time. Useful for when doing data dumps
            from one database to another or when doing a massive write
            operation
        :type batch_size: int
        """

        def list_chunks(l, n):
            """ Yield successive n-sized chunks from l.
            """
            for i in xrange(0, len(l), n):
                yield l[i:i + n]

        batch_size = kwargs.get('batch_size')
        if batch_size and batch_size > 0:
            # Re-issue each series as a sequence of batch-sized writes.
            for item in data:
                name = item.get('name')
                columns = item.get('columns')
                point_list = item.get('points', [])

                for batch in list_chunks(point_list, batch_size):
                    item = [{
                        "points": batch,
                        "name": name,
                        "columns": columns
                    }]
                    self._write_points(
                        data=item,
                        time_precision=time_precision)
            return True
        else:
            return self._write_points(data=data,
                                      time_precision=time_precision)
    def _write_points(self, data, time_precision):
        """
        Validate the precision and dispatch the write over HTTP or UDP.

        :raise Exception: on an invalid precision, or when a non-second
            precision is requested for a UDP write.
        """
        if time_precision not in ['s', 'm', 'ms', 'u']:
            raise Exception(
                "Invalid time precision is given. "
                "(use 's', 'm', 'ms' or 'u')")

        if self.use_udp and time_precision != 's':
            raise Exception(
                "InfluxDB only supports seconds precision for udp writes"
            )

        url = "db/{0}/series".format(self._database)

        params = {
            'time_precision': time_precision
        }

        if self.use_udp:
            # UDP writes are fire-and-forget; no response is checked.
            self.send_packet(data)
        else:
            self.request(
                url=url,
                method='POST',
                params=params,
                data=data,
                expected_response_code=200
            )

        return True

    # One Time Deletes

    def delete_points(self, name):
        """
        Delete an entire series
        """
        url = "db/{0}/series/{1}".format(self._database, name)

        self.request(
            url=url,
            method='DELETE',
            expected_response_code=204
        )

        return True
""" return self._query(query, time_precision=time_precision, chunked=chunked) # Querying Data # # GET db/:name/series. It takes five parameters def _query(self, query, time_precision='s', chunked=False): if time_precision not in ['s', 'm', 'ms', 'u']: raise Exception( "Invalid time precision is given. (use 's', 'm', 'ms' or 'u')") if chunked is True: chunked_param = 'true' else: chunked_param = 'false' # Build the URL of the serie to query url = "db/{0}/series".format(self._database) params = { 'q': query, 'time_precision': time_precision, 'chunked': chunked_param } response = self.request( url=url, method='GET', params=params, expected_response_code=200 ) if chunked: decoded = {} try: decoded = chunked_json.loads(response.content.decode()) except UnicodeDecodeError: decoded = chunked_json.loads(response.content.decode('utf-8')) finally: return list(decoded) else: return response.json() # Creating and Dropping Databases # # ### create a database # curl -X POST http://localhost:8086/db -d '{"name": "site_development"}' # # ### drop a database # curl -X DELETE http://localhost:8086/db/site_development def create_database(self, database): """ create_database() Create a database on the InfluxDB server. :param database: the name of the database to create :type database: string :rtype: boolean """ url = "db" data = {'name': database} self.request( url=url, method='POST', data=data, expected_response_code=201 ) return True def delete_database(self, database): """ delete_database() Drop a database on the InfluxDB server. 
:param database: the name of the database to delete :type database: string :rtype: boolean """ url = "db/{0}".format(database) self.request( url=url, method='DELETE', expected_response_code=204 ) return True # ### get list of databases # curl -X GET http://localhost:8086/db def get_list_database(self): """ Get the list of databases """ url = "db" response = self.request( url=url, method='GET', expected_response_code=200 ) return response.json() def get_database_list(self): """ DEPRECATED. Get the list of databases """ warnings.warn( "get_database_list is deprecated, and will be removed " "in future versions. Please use " "``InfluxDBClient.get_list_database`` instead.", FutureWarning) return self.get_list_database() def delete_series(self, series): """ delete_series() Drop a series on the InfluxDB server. :param series: the name of the series to delete :type series: string :rtype: boolean """ url = "db/{0}/series/{1}".format( self._database, series ) self.request( url=url, method='DELETE', expected_response_code=204 ) return True def get_list_series(self): """ Get a list of all time series in a database """ response = self._query('list series') series_list = [] for series in response[0]['points']: series_list.append(series[1]) return series_list def get_list_continuous_queries(self): """ Get a list of continuous queries """ response = self._query('list continuous queries') queries_list = [] for query in response[0]['points']: queries_list.append(query[2]) return queries_list # Security # get list of cluster admins # curl http://localhost:8086/cluster_admins?u=root&p=root # add cluster admin # curl -X POST http://localhost:8086/cluster_admins?u=root&p=root \ # -d '{"name": "paul", "password": "i write teh docz"}' # update cluster admin password # curl -X POST http://localhost:8086/cluster_admins/paul?u=root&p=root \ # -d '{"password": "new pass"}' # delete cluster admin # curl -X DELETE http://localhost:8086/cluster_admins/paul?u=root&p=root # Database admins, with a 
database name of site_dev # get list of database admins # curl http://localhost:8086/db/site_dev/admins?u=root&p=root # add database admin # curl -X POST http://localhost:8086/db/site_dev/admins?u=root&p=root \ # -d '{"name": "paul", "password": "i write teh docz"}' # update database admin password # curl -X POST http://localhost:8086/db/site_dev/admins/paul?u=root&p=root\ # -d '{"password": "new pass"}' # delete database admin # curl -X DELETE \ # http://localhost:8086/db/site_dev/admins/paul?u=root&p=root def get_list_cluster_admins(self): """ Get list of cluster admins """ response = self.request( url="cluster_admins", method='GET', expected_response_code=200 ) return response.json() def add_cluster_admin(self, new_username, new_password): """ Add cluster admin """ data = { 'name': new_username, 'password': new_password } self.request( url="cluster_admins", method='POST', data=data, expected_response_code=200 ) return True def update_cluster_admin_password(self, username, new_password): """ Update cluster admin password """ url = "cluster_admins/{0}".format(username) data = { 'password': new_password } self.request( url=url, method='POST', data=data, expected_response_code=200 ) return True def delete_cluster_admin(self, username): """ Delete cluster admin """ url = "cluster_admins/{0}".format(username) self.request( url=url, method='DELETE', expected_response_code=200 ) return True def set_database_admin(self, username): """ Set user as database admin """ return self.alter_database_admin(username, True) def unset_database_admin(self, username): """ Unset user as database admin """ return self.alter_database_admin(username, False) def alter_database_admin(self, username, is_admin): url = "db/{0}/users/{1}".format(self._database, username) data = {'admin': is_admin} self.request( url=url, method='POST', data=data, expected_response_code=200 ) return True def get_list_database_admins(self): """ TODO: Get list of database admins 2013-11-08: This endpoint has not 
been implemented yet in ver0.0.8, but it is documented in http://influxdb.org/docs/api/http.html. See also: src/api/http/api.go:l57 """ raise NotImplementedError() def add_database_admin(self, new_username, new_password): """ TODO: Add cluster admin 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, but it is documented in http://influxdb.org/docs/api/http.html. See also: src/api/http/api.go:l57 """ raise NotImplementedError() def update_database_admin_password(self, username, new_password): """ TODO: Update database admin password 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, but it is documented in http://influxdb.org/docs/api/http.html. See also: src/api/http/api.go:l57 """ raise NotImplementedError() def delete_database_admin(self, username): """ TODO: Delete database admin 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, but it is documented in http://influxdb.org/docs/api/http.html. See also: src/api/http/api.go:l57 """ raise NotImplementedError() ### # Limiting User Access # Database users # get list of database users # curl http://localhost:8086/db/site_dev/users?u=root&p=root # add database user # curl -X POST http://localhost:8086/db/site_dev/users?u=root&p=root \ # -d '{"name": "paul", "password": "i write teh docz"}' # update database user password # curl -X POST http://localhost:8086/db/site_dev/users/paul?u=root&p=root \ # -d '{"password": "new pass"}' # delete database user # curl -X DELETE http://localhost:8086/db/site_dev/users/paul?u=root&p=root def get_database_users(self): """ Get list of database users """ url = "db/{0}/users".format(self._database) response = self.request( url=url, method='GET', expected_response_code=200 ) return response.json() def add_database_user(self, new_username, new_password, permissions=None): """ Add database user :param permissions: A ``(readFrom, writeTo)`` tuple """ url = "db/{0}/users".format(self._database) data = { 'name': new_username, 'password': 
new_password } if permissions: try: data['readFrom'], data['writeTo'] = permissions except (ValueError, TypeError): raise TypeError( "'permissions' must be (readFrom, writeTo) tuple" ) self.request( url=url, method='POST', data=data, expected_response_code=200 ) return True def update_database_user_password(self, username, new_password): """ Update password """ return self.alter_database_user(username, new_password) def alter_database_user(self, username, password=None, permissions=None): """ Alters a database user and/or their permissions. :param permissions: A ``(readFrom, writeTo)`` tuple :raise TypeError: if permissions cannot be read. :raise ValueError: if neither password nor permissions provided. """ url = "db/{0}/users/{1}".format(self._database, username) if not password and not permissions: raise ValueError("Nothing to alter for user {0}.".format(username)) data = {} if password: data['password'] = password if permissions: try: data['readFrom'], data['writeTo'] = permissions except (ValueError, TypeError): raise TypeError( "'permissions' must be (readFrom, writeTo) tuple" ) self.request( url=url, method='POST', data=data, expected_response_code=200 ) if username == self._username: self._password = password return True def delete_database_user(self, username): """ Delete database user """ url = "db/{0}/users/{1}".format(self._database, username) self.request( url=url, method='DELETE', expected_response_code=200 ) return True # update the user by POSTing to db/site_dev/users/paul def update_permission(self, username, json_body): """ TODO: Update read/write permission 2013-11-08: This endpoint has not been implemented yet in ver0.0.8, but it is documented in http://influxdb.org/docs/api/http.html. 
See also: src/api/http/api.go:l57 """ raise NotImplementedError() def send_packet(self, packet): data = json.dumps(packet) byte = data.encode('utf-8') self.udp_socket.sendto(byte, (self._host, self.udp_port)) influxdb-2.12.0/influxdb/dataframe_client.py0000644000175000017500000000073012652700251021646 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- """ DataFrame client for InfluxDB """ __all__ = ['DataFrameClient'] try: import pandas del pandas except ImportError as err: from .client import InfluxDBClient class DataFrameClient(InfluxDBClient): def __init__(self, *a, **kw): raise ImportError("DataFrameClient requires Pandas " "which couldn't be imported: %s" % err) else: from ._dataframe_client import DataFrameClient influxdb-2.12.0/influxdb/helper.py0000644000175000017500000001267612652700251017657 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- """ Helper class for InfluxDB """ from collections import namedtuple, defaultdict from warnings import warn import six class SeriesHelper(object): """ Subclassing this helper eases writing data points in bulk. All data points are immutable, insuring they do not get overwritten. Each subclass can write to its own database. The time series names can also be based on one or more defined fields. Annotated example:: class MySeriesHelper(SeriesHelper): class Meta: # Meta class stores time series helper configuration. series_name = 'events.stats.{server_name}' # Series name must be a string, curly brackets for dynamic use. fields = ['time', 'server_name'] # Defines all the fields in this time series. ### Following attributes are optional. ### client = TestSeriesHelper.client # Client should be an instance of InfluxDBClient. :warning: Only used if autocommit is True. bulk_size = 5 # Defines the number of data points to write simultaneously. # Only applicable if autocommit is True. autocommit = True # If True and no bulk_size, then will set bulk_size to 1. 
""" __initialized__ = False def __new__(cls, *args, **kwargs): """ Initializes class attributes for subsequent constructor calls. :note: *args and **kwargs are not explicitly used in this function, but needed for Python 2 compatibility. """ if not cls.__initialized__: cls.__initialized__ = True try: _meta = getattr(cls, 'Meta') except AttributeError: raise AttributeError( 'Missing Meta class in {0}.'.format( cls.__name__)) for attr in ['series_name', 'fields', 'tags']: try: setattr(cls, '_' + attr, getattr(_meta, attr)) except AttributeError: raise AttributeError( 'Missing {0} in {1} Meta class.'.format( attr, cls.__name__)) cls._autocommit = getattr(_meta, 'autocommit', False) cls._client = getattr(_meta, 'client', None) if cls._autocommit and not cls._client: raise AttributeError( 'In {0}, autocommit is set to True, but no client is set.' .format(cls.__name__)) try: cls._bulk_size = getattr(_meta, 'bulk_size') if cls._bulk_size < 1 and cls._autocommit: warn( 'Definition of bulk_size in {0} forced to 1, ' 'was less than 1.'.format(cls.__name__)) cls._bulk_size = 1 except AttributeError: cls._bulk_size = -1 else: if not cls._autocommit: warn( 'Definition of bulk_size in {0} has no affect because' ' autocommit is false.'.format(cls.__name__)) cls._datapoints = defaultdict(list) cls._type = namedtuple(cls.__name__, cls._fields + cls._tags) return super(SeriesHelper, cls).__new__(cls) def __init__(self, **kw): """ Constructor call creates a new data point. All fields must be present. :note: Data points written when `bulk_size` is reached per Helper. :warning: Data points are *immutable* (`namedtuples`). 
""" cls = self.__class__ if sorted(cls._fields + cls._tags) != sorted(kw.keys()): raise NameError( 'Expected {0}, got {1}.'.format( sorted(cls._fields + cls._tags), kw.keys())) cls._datapoints[cls._series_name.format(**kw)].append(cls._type(**kw)) if cls._autocommit and \ sum(len(series) for series in cls._datapoints.values()) \ >= cls._bulk_size: cls.commit() @classmethod def commit(cls, client=None): """ Commit everything from datapoints via the client. :param client: InfluxDBClient instance for writing points to InfluxDB. :attention: any provided client will supersede the class client. :return: result of client.write_points. """ if not client: client = cls._client rtn = client.write_points(cls._json_body_()) cls._reset_() return rtn @classmethod def _json_body_(cls): """ :return: JSON body of these datapoints. """ json = [] for series_name, data in six.iteritems(cls._datapoints): for point in data: json_point = { "measurement": series_name, "fields": {}, "tags": {}, } for field in cls._fields: json_point['fields'][field] = getattr(point, field) for tag in cls._tags: json_point['tags'][tag] = getattr(point, tag) json.append(json_point) return json @classmethod def _reset_(cls): """ Reset data storage. 
""" cls._datapoints = defaultdict(list) influxdb-2.12.0/influxdb/chunked_json.py0000644000175000017500000000070612652700251021041 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- # # Author: Adrian Sampson # Source: https://gist.github.com/sampsyo/920215 # import json _decoder = json.JSONDecoder() def loads(s): """A generator reading a sequence of JSON values from a string.""" while s: s = s.strip() obj, pos = _decoder.raw_decode(s) if not pos: raise ValueError('no JSON object found at %i' % pos) yield obj s = s[pos:] influxdb-2.12.0/influxdb/client.py0000644000175000017500000007761312652700251017660 0ustar reazemreazem00000000000000# -*- coding: utf-8 -*- """ Python client for InfluxDB """ from functools import wraps import json import socket import time import threading import random import requests import requests.exceptions from sys import version_info from influxdb.line_protocol import make_lines from influxdb.resultset import ResultSet from .exceptions import InfluxDBClientError from .exceptions import InfluxDBServerError try: xrange except NameError: xrange = range if version_info[0] == 3: from urllib.parse import urlparse else: from urlparse import urlparse class InfluxDBClient(object): """The :class:`~.InfluxDBClient` object holds information necessary to connect to InfluxDB. Requests can be made to InfluxDB directly through the client. 
:param host: hostname to connect to InfluxDB, defaults to 'localhost' :type host: str :param port: port to connect to InfluxDB, defaults to 8086 :type port: int :param username: user to connect, defaults to 'root' :type username: str :param password: password of the user, defaults to 'root' :type password: str :param database: database name to connect to, defaults to None :type database: str :param ssl: use https instead of http to connect to InfluxDB, defaults to False :type ssl: bool :param verify_ssl: verify SSL certificates for HTTPS requests, defaults to False :type verify_ssl: bool :param timeout: number of seconds Requests will wait for your client to establish a connection, defaults to None :type timeout: int :param use_udp: use UDP to connect to InfluxDB, defaults to False :type use_udp: int :param udp_port: UDP port to connect to InfluxDB, defaults to 4444 :type udp_port: int :param proxies: HTTP(S) proxy to use for Requests, defaults to {} :type proxies: dict """ def __init__(self, host='localhost', port=8086, username='root', password='root', database=None, ssl=False, verify_ssl=False, timeout=None, use_udp=False, udp_port=4444, proxies=None, ): """Construct a new InfluxDBClient object.""" self.__host = host self._port = port self._username = username self._password = password self._database = database self._timeout = timeout self._verify_ssl = verify_ssl self.use_udp = use_udp self.udp_port = udp_port self._session = requests.Session() if use_udp: self.udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) self._scheme = "http" if ssl is True: self._scheme = "https" if proxies is None: self._proxies = {} else: self._proxies = proxies self.__baseurl = "{0}://{1}:{2}".format( self._scheme, self._host, self._port) self._headers = { 'Content-type': 'application/json', 'Accept': 'text/plain' } # _baseurl and _host are properties to allow InfluxDBClusterClient # to override them with thread-local variables @property def _baseurl(self): return 
self._get_baseurl() def _get_baseurl(self): return self.__baseurl @property def _host(self): return self._get_host() def _get_host(self): return self.__host @staticmethod def from_DSN(dsn, **kwargs): """Return an instance of :class:`~.InfluxDBClient` from the provided data source name. Supported schemes are "influxdb", "https+influxdb" and "udp+influxdb". Parameters for the :class:`~.InfluxDBClient` constructor may also be passed to this method. :param dsn: data source name :type dsn: string :param kwargs: additional parameters for `InfluxDBClient` :type kwargs: dict :raises ValueError: if the provided DSN has any unexpected values :Example: :: >> cli = InfluxDBClient.from_DSN('influxdb://username:password@\ localhost:8086/databasename', timeout=5) >> type(cli) >> cli = InfluxDBClient.from_DSN('udp+influxdb://username:pass@\ localhost:8086/databasename', timeout=5, udp_port=159) >> print('{0._baseurl} - {0.use_udp} {0.udp_port}'.format(cli)) http://localhost:8086 - True 159 .. note:: parameters provided in `**kwargs` may override dsn parameters .. note:: when using "udp+influxdb" the specified port (if any) will be used for the TCP connection; specify the UDP port with the additional `udp_port` parameter (cf. examples). """ init_args = parse_dsn(dsn) host, port = init_args.pop('hosts')[0] init_args['host'] = host init_args['port'] = port init_args.update(kwargs) return InfluxDBClient(**init_args) def switch_database(self, database): """Change the client's database. :param database: the name of the database to switch to :type database: str """ self._database = database def switch_user(self, username, password): """Change the client's username. 
:param username: the username to switch to :type username: str :param password: the password for the username :type password: str """ self._username = username self._password = password def request(self, url, method='GET', params=None, data=None, expected_response_code=200, headers=None): """Make a HTTP request to the InfluxDB API. :param url: the path of the HTTP request, e.g. write, query, etc. :type url: str :param method: the HTTP method for the request, defaults to GET :type method: str :param params: additional parameters for the request, defaults to None :type params: dict :param data: the data of the request, defaults to None :type data: str :param expected_response_code: the expected response code of the request, defaults to 200 :type expected_response_code: int :returns: the response from the request :rtype: :class:`requests.Response` :raises InfluxDBServerError: if the response code is any server error code (5xx) :raises InfluxDBClientError: if the response code is not the same as `expected_response_code` and is not a server error code """ url = "{0}/{1}".format(self._baseurl, url) if headers is None: headers = self._headers if params is None: params = {} if isinstance(data, (dict, list)): data = json.dumps(data) # Try to send the request a maximum of three times. (see #103) # TODO (aviau): Make this configurable. 
for i in range(0, 3): try: response = self._session.request( method=method, url=url, auth=(self._username, self._password), params=params, data=data, headers=headers, proxies=self._proxies, verify=self._verify_ssl, timeout=self._timeout ) break except requests.exceptions.ConnectionError as e: if i < 2: continue else: raise e if response.status_code >= 500 and response.status_code < 600: raise InfluxDBServerError(response.content) elif response.status_code == expected_response_code: return response else: raise InfluxDBClientError(response.content, response.status_code) def write(self, data, params=None, expected_response_code=204): """Write data to InfluxDB. :param data: the data to be written :type data: dict :param params: additional parameters for the request, defaults to None :type params: dict :param expected_response_code: the expected response code of the write operation, defaults to 204 :type expected_response_code: int :returns: True, if the write operation is successful :rtype: bool """ headers = self._headers headers['Content-type'] = 'application/octet-stream' if params: precision = params.get('precision') else: precision = None self.request( url="write", method='POST', params=params, data=make_lines(data, precision).encode('utf-8'), expected_response_code=expected_response_code, headers=headers ) return True def query(self, query, params=None, epoch=None, expected_response_code=200, database=None, raise_errors=True): """Send a query to InfluxDB. 
:param query: the actual query string :type query: str :param params: additional parameters for the request, defaults to {} :type params: dict :param expected_response_code: the expected status code of response, defaults to 200 :type expected_response_code: int :param database: database to query, defaults to None :type database: str :param raise_errors: Whether or not to raise exceptions when InfluxDB returns errors, defaults to True :type raise_errors: bool :returns: the queried data :rtype: :class:`~.ResultSet` """ if params is None: params = {} params['q'] = query params['db'] = database or self._database if epoch is not None: params['epoch'] = epoch response = self.request( url="query", method='GET', params=params, data=None, expected_response_code=expected_response_code ) data = response.json() results = [ ResultSet(result, raise_errors=raise_errors) for result in data.get('results', []) ] # TODO(aviau): Always return a list. (This would be a breaking change) if len(results) == 1: return results[0] else: return results def write_points(self, points, time_precision=None, database=None, retention_policy=None, tags=None, batch_size=None, ): """Write to multiple time series names. :param points: the list of points to be written in the database :type points: list of dictionaries, each dictionary represents a point :param time_precision: Either 's', 'm', 'ms' or 'u', defaults to None :type time_precision: str :param database: the database to write the points to. Defaults to the client's current database :type database: str :param tags: a set of key-value pairs associated with each point. Both keys and values must be strings. These are shared tags and will be merged with point-specific tags, defaults to None :type tags: dict :param retention_policy: the retention policy for the points. Defaults to None :type retention_policy: str :param batch_size: value to write the points in batches instead of all at one time. 
Useful for when doing data dumps from one database to another or when doing a massive write operation, defaults to None :type batch_size: int :returns: True, if the operation is successful :rtype: bool .. note:: if no retention policy is specified, the default retention policy for the database is used """ if batch_size and batch_size > 0: for batch in self._batches(points, batch_size): self._write_points(points=batch, time_precision=time_precision, database=database, retention_policy=retention_policy, tags=tags) return True else: return self._write_points(points=points, time_precision=time_precision, database=database, retention_policy=retention_policy, tags=tags) def _batches(self, iterable, size): for i in xrange(0, len(iterable), size): yield iterable[i:i + size] def _write_points(self, points, time_precision, database, retention_policy, tags): if time_precision not in ['n', 'u', 'ms', 's', 'm', 'h', None]: raise ValueError( "Invalid time precision is given. " "(use 'n', 'u', 'ms', 's', 'm' or 'h')") if self.use_udp and time_precision and time_precision != 's': raise ValueError( "InfluxDB only supports seconds precision for udp writes" ) data = { 'points': points } if tags is not None: data['tags'] = tags params = { 'db': database or self._database } if time_precision is not None: params['precision'] = time_precision if retention_policy is not None: params['rp'] = retention_policy if self.use_udp: self.send_packet(data) else: self.write( data=data, params=params, expected_response_code=204 ) return True def get_list_database(self): """Get the list of databases in InfluxDB. :returns: all databases in InfluxDB :rtype: list of dictionaries :Example: :: >> dbs = client.get_list_database() >> dbs [{u'name': u'db1'}, {u'name': u'db2'}, {u'name': u'db3'}] """ return list(self.query("SHOW DATABASES").get_points()) def create_database(self, dbname, if_not_exists=False): """Create a new database in InfluxDB. 
:param dbname: the name of the database to create :type dbname: str """ if if_not_exists: self.query("CREATE DATABASE IF NOT EXISTS \"%s\"" % dbname) else: self.query("CREATE DATABASE \"%s\"" % dbname) def drop_database(self, dbname): """Drop a database from InfluxDB. :param dbname: the name of the database to drop :type dbname: str """ self.query("DROP DATABASE \"%s\"" % dbname) def create_retention_policy(self, name, duration, replication, database=None, default=False): """Create a retention policy for a database. :param name: the name of the new retention policy :type name: str :param duration: the duration of the new retention policy. Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported and mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks, respectively. For infinite retention – meaning the data will never be deleted – use 'INF' for duration. The minimum retention period is 1 hour. :type duration: str :param replication: the replication of the retention policy :type replication: str :param database: the database for which the retention policy is created. Defaults to current client's database :type database: str :param default: whether or not to set the policy as default :type default: bool """ query_string = \ "CREATE RETENTION POLICY %s ON %s " \ "DURATION %s REPLICATION %s" % \ (name, database or self._database, duration, replication) if default is True: query_string += " DEFAULT" self.query(query_string) def alter_retention_policy(self, name, database=None, duration=None, replication=None, default=None): """Mofidy an existing retention policy for a database. :param name: the name of the retention policy to modify :type name: str :param database: the database for which the retention policy is modified. Defaults to current client's database :type database: str :param duration: the new duration of the existing retention policy. 
Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported and mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks, respectively. For infinite retention – meaning the data will never be deleted – use 'INF' for duration. The minimum retention period is 1 hour. :type duration: str :param replication: the new replication of the existing retention policy :type replication: str :param default: whether or not to set the modified policy as default :type default: bool .. note:: at least one of duration, replication, or default flag should be set. Otherwise the operation will fail. """ query_string = ( "ALTER RETENTION POLICY {0} ON {1}" ).format(name, database or self._database) if duration: query_string += " DURATION {0}".format(duration) if replication: query_string += " REPLICATION {0}".format(replication) if default is True: query_string += " DEFAULT" self.query(query_string) def get_list_retention_policies(self, database=None): """Get the list of retention policies for a database. :param database: the name of the database, defaults to the client's current database :type database: str :returns: all retention policies for the database :rtype: list of dictionaries :Example: :: >> ret_policies = client.get_list_retention_policies('my_db') >> ret_policies [{u'default': True, u'duration': u'0', u'name': u'default', u'replicaN': 1}] """ rsp = self.query( "SHOW RETENTION POLICIES ON %s" % (database or self._database) ) return list(rsp.get_points()) def get_list_series(self, database=None): """Get the list of series for a database. 
:param database: the name of the database, defaults to the client's current database :type database: str :returns: all series in the specified database :rtype: list of dictionaries :Example: >> series = client.get_list_series('my_database') >> series [{'name': u'cpu_usage', 'tags': [{u'_id': 1, u'host': u'server01', u'region': u'us-west'}]}] """ rsp = self.query("SHOW SERIES", database=database) series = [] for serie in rsp.items(): series.append( { "name": serie[0][0], "tags": list(serie[1]) } ) return series def get_list_servers(self): """Get the list of servers in InfluxDB cluster. :returns: all nodes in InfluxDB cluster :rtype: list of dictionaries :Example: :: >> servers = client.get_list_servers() >> servers [{'cluster_addr': 'server01:8088', 'id': 1, 'raft': True, 'raft-leader': True}] """ return list(self.query("SHOW SERVERS").get_points()) def get_list_users(self): """Get the list of all users in InfluxDB. :returns: all users in InfluxDB :rtype: list of dictionaries :Example: :: >> users = client.get_list_users() >> users [{u'admin': True, u'user': u'user1'}, {u'admin': False, u'user': u'user2'}, {u'admin': False, u'user': u'user3'}] """ return list(self.query("SHOW USERS").get_points()) def create_user(self, username, password, admin=False): """Create a new user in InfluxDB :param username: the new username to create :type username: str :param password: the password for the new user :type password: str :param admin: whether the user should have cluster administration privileges or not :type admin: boolean """ text = "CREATE USER {0} WITH PASSWORD '{1}'".format(username, password) if admin: text += ' WITH ALL PRIVILEGES' self.query(text) def drop_user(self, username): """Drop an user from InfluxDB. :param username: the username to drop :type username: str """ text = "DROP USER {0}".format(username) self.query(text) def set_user_password(self, username, password): """Change the password of an existing user. 
:param username: the username who's password is being changed :type username: str :param password: the new password for the user :type password: str """ text = "SET PASSWORD FOR {0} = '{1}'".format(username, password) self.query(text) def delete_series(self, database=None, measurement=None, tags=None): """Delete series from a database. Series can be filtered by measurement and tags. :param measurement: Delete all series from a measurement :type id: string :param tags: Delete all series that match given tags :type id: dict :param database: the database from which the series should be deleted, defaults to client's current database :type database: str """ database = database or self._database query_str = 'DROP SERIES' if measurement: query_str += ' FROM "{0}"'.format(measurement) if tags: query_str += ' WHERE ' + ' and '.join(["{0}='{1}'".format(k, v) for k, v in tags.items()]) self.query(query_str, database=database) def revoke_admin_privileges(self, username): """Revoke cluster administration privileges from an user. :param username: the username to revoke privileges from :type username: str .. note:: Only a cluster administrator can create/ drop databases and manage users. """ text = "REVOKE ALL PRIVILEGES FROM {0}".format(username) self.query(text) def grant_privilege(self, privilege, database, username): """Grant a privilege on a database to an user. :param privilege: the privilege to grant, one of 'read', 'write' or 'all'. The string is case-insensitive :type privilege: str :param database: the database to grant the privilege on :type database: str :param username: the username to grant the privilege to :type username: str """ text = "GRANT {0} ON {1} TO {2}".format(privilege, database, username) self.query(text) def revoke_privilege(self, privilege, database, username): """Revoke a privilege on a database from an user. :param privilege: the privilege to revoke, one of 'read', 'write' or 'all'. 
The string is case-insensitive :type privilege: str :param database: the database to revoke the privilege on :type database: str :param username: the username to revoke the privilege from :type username: str """ text = "REVOKE {0} ON {1} FROM {2}".format(privilege, database, username) self.query(text) def send_packet(self, packet): """Send an UDP packet. :param packet: the packet to be sent :type packet: dict """ data = make_lines(packet).encode('utf-8') self.udp_socket.sendto(data, (self._host, self.udp_port)) class InfluxDBClusterClient(object): """The :class:`~.InfluxDBClusterClient` is the client for connecting to a cluster of InfluxDB servers. Each query hits different host from the list of hosts. :param hosts: all hosts to be included in the cluster, each of which should be in the format (address, port), e.g. [('127.0.0.1', 8086), ('127.0.0.1', 9096)]. Defaults to [('localhost', 8086)] :type hosts: list of tuples :param shuffle: whether the queries should hit servers evenly(randomly), defaults to True :type shuffle: bool :param client_base_class: the base class for the cluster client. This parameter is used to enable the support of different client types. Defaults to :class:`~.InfluxDBClient` :param healing_delay: the delay in seconds, counting from last failure of a server, before re-adding server to the list of working servers. 
Defaults to 15 minutes (900 seconds) """ def __init__(self, hosts=[('localhost', 8086)], username='root', password='root', database=None, ssl=False, verify_ssl=False, timeout=None, use_udp=False, udp_port=4444, shuffle=True, client_base_class=InfluxDBClient, healing_delay=900, ): self.clients = [self] # Keep it backwards compatible self.hosts = hosts self.bad_hosts = [] # Corresponding server has failures in history self.shuffle = shuffle self.healing_delay = healing_delay self._last_healing = time.time() host, port = self.hosts[0] self._hosts_lock = threading.Lock() self._thread_local = threading.local() self._client = client_base_class(host=host, port=port, username=username, password=password, database=database, ssl=ssl, verify_ssl=verify_ssl, timeout=timeout, use_udp=use_udp, udp_port=udp_port) for method in dir(client_base_class): orig_attr = getattr(client_base_class, method, '') if method.startswith('_') or not callable(orig_attr): continue setattr(self, method, self._make_func(orig_attr)) self._client._get_host = self._get_host self._client._get_baseurl = self._get_baseurl self._update_client_host(self.hosts[0]) @staticmethod def from_DSN(dsn, client_base_class=InfluxDBClient, shuffle=True, **kwargs): """Same as :meth:`~.InfluxDBClient.from_DSN`, but supports multiple servers. :param shuffle: whether the queries should hit servers evenly(randomly), defaults to True :type shuffle: bool :param client_base_class: the base class for all clients in the cluster. This parameter is used to enable the support of different client types. 
Defaults to :class:`~.InfluxDBClient` :Example: :: >> cluster = InfluxDBClusterClient.from_DSN('influxdb://usr:pwd\ @host1:8086,usr:pwd@host2:8086/db_name', timeout=5) >> type(cluster) >> cluster.hosts [('host1', 8086), ('host2', 8086)] >> cluster._client ] """ init_args = parse_dsn(dsn) init_args.update(**kwargs) init_args['shuffle'] = shuffle init_args['client_base_class'] = client_base_class cluster_client = InfluxDBClusterClient(**init_args) return cluster_client def _update_client_host(self, host): self._thread_local.host, self._thread_local.port = host self._thread_local.baseurl = "{0}://{1}:{2}".format( self._client._scheme, self._client._host, self._client._port ) def _get_baseurl(self): return self._thread_local.baseurl def _get_host(self): return self._thread_local.host def _make_func(self, orig_func): @wraps(orig_func) def func(*args, **kwargs): now = time.time() with self._hosts_lock: if (self.bad_hosts and self._last_healing + self.healing_delay < now): h = self.bad_hosts.pop(0) self.hosts.append(h) self._last_healing = now if self.shuffle: random.shuffle(self.hosts) hosts = self.hosts + self.bad_hosts for h in hosts: bad_host = False try: self._update_client_host(h) return orig_func(self._client, *args, **kwargs) except InfluxDBClientError as e: # Errors caused by user's requests, re-raise raise e except Exception as e: # Errors that might caused by server failure, try another bad_host = True with self._hosts_lock: if h in self.hosts: self.hosts.remove(h) self.bad_hosts.append(h) self._last_healing = now finally: with self._hosts_lock: if not bad_host and h in self.bad_hosts: self.bad_hosts.remove(h) self.hosts.append(h) raise InfluxDBServerError("InfluxDB: no viable server!") return func def parse_dsn(dsn): conn_params = urlparse(dsn) init_args = {} scheme_info = conn_params.scheme.split('+') if len(scheme_info) == 1: scheme = scheme_info[0] modifier = None else: modifier, scheme = scheme_info if scheme != 'influxdb': raise ValueError('Unknown scheme 
"{0}".'.format(scheme)) if modifier: if modifier == 'udp': init_args['use_udp'] = True elif modifier == 'https': init_args['ssl'] = True else: raise ValueError('Unknown modifier "{0}".'.format(modifier)) netlocs = conn_params.netloc.split(',') init_args['hosts'] = [] for netloc in netlocs: parsed = _parse_netloc(netloc) init_args['hosts'].append((parsed['host'], int(parsed['port']))) init_args['username'] = parsed['username'] init_args['password'] = parsed['password'] if conn_params.path and len(conn_params.path) > 1: init_args['database'] = conn_params.path[1:] return init_args def _parse_netloc(netloc): info = urlparse("http://{0}".format(netloc)) return {'username': info.username or None, 'password': info.password or None, 'host': info.hostname or 'localhost', 'port': info.port or 8086} influxdb-2.12.0/setup.cfg0000644000175000017500000000012212652700261016014 0ustar reazemreazem00000000000000[wheel] universal = 1 [egg_info] tag_build = tag_date = 0 tag_svn_revision = 0