vertica-python-0.7.3/.gitignore

*.py[cod]
test.py

# C extensions
*.so

# Packages
*.egg
*.egg-info
dist
build
eggs
parts
bin
var
sdist
develop-eggs
.installed.cfg
lib64

# Installer logs
pip-log.txt

# Unit test / coverage reports
.coverage
.tox
nosetests.xml

# Translations
*.mo

# Mr Developer
.mr.developer.cfg
.project
.pydevproject
.DS_Store
*.iml

# vagrant
.vagrant

# pycharm
.idea

# default virtual environment
/env/

# pyenv
.python-version

vertica-python-0.7.3/LICENSE

vertica-python

Copyright (c) 2013 Uber Technologies, Inc.

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

vertica-python-0.7.3/MANIFEST.in

include LICENSE
include requirements.txt
include README.md
recursive-include vertica_python *.py
recursive-exclude vertica_python *.pyc

vertica-python-0.7.3/README.md

# vertica-python

[![PyPI version](https://badge.fury.io/py/vertica-python.png)](http://badge.fury.io/py/vertica-python)

0.6.x adds Python 3 support (unicode namedparams support is currently broken in Python 3, see issue 112).

0.5.x changes the connection method to accept kwargs instead of a dict to be more dbapi compliant. Copy methods were improved and consolidated in 0.5.1.

0.4.x breaks some of the older query interfaces (the row_handler callback, and connection.query). It replaces the row_handler callback with an iterate() method; please see the examples below. If you are on 0.4.x, please upgrade to 0.4.6, as there are various bug fixes.

vertica-python is a native Python adapter for the Vertica (http://www.vertica.com) database.

vertica-python is currently in beta stage; it has been tested for functionality and has a very basic test suite. Please use with caution, and feel free to submit issues and/or pull requests (after running the unit tests).

vertica-python has been tested with Vertica 6.1.2/7.0.0+ and Python 2.7/3.4.

## Installation

If you're using pip >= 1.4 and you don't already have pytz installed:

    pip install --pre pytz

If you're using pip >= 1.4 and you don't already have python-dateutil installed:

    pip install --pre python-dateutil

To install vertica-python with pip:

    pip install vertica-python

To install vertica-python with pip (with optional namedparams dependencies):

    # see 'Using named parameters' section below
    pip install 'vertica-python[namedparams]'

Source code for vertica-python can be found at: http://github.com/uber/vertica-python

## Run unit tests

To run the tests, you must have access to a Vertica database. Here's one way to go about it:

* Download Docker Kitematic: https://kitematic.com/
* Spin up a Vertica container (I use sumitchawla/vertica)
* Edit the port number in `tests/test_commons.py` to match the container (in this tree the tests read their connection settings from `VP_TEST_*` environment variables; see the sketch below)
* Install tox: http://tox.readthedocs.io
* Edit the `tox.ini` envlist property to list the version(s) of Python you have installed
* Run tox:

```bash
tox
```
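The bundled test suite (`vertica_python/tests/base.py`) reads its connection settings from environment variables, and `tox.ini` passes them through to the tests via `passenv = *`. A minimal sketch of pointing the tests at a local container; the values shown are the defaults defined in `base.py`, so only the ones that differ for your setup need exporting:

```bash
# Connection settings read by vertica_python/tests/base.py (defaults shown);
# adjust host/port to match your Vertica container before running tox.
export VP_TEST_HOST=127.0.0.1
export VP_TEST_PORT=5433
export VP_TEST_USER=dbadmin
export VP_TEST_PASSWD=
export VP_TEST_DB=docker
export VP_TEST_TABLE=vertica_python_unit_test
tox
```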
## Usage

**Create connection**

```python
import vertica_python

conn_info = {'host': '127.0.0.1',
             'port': 5433,
             'user': 'some_user',
             'password': 'some_password',
             'database': 'a_database',
             # 10 minutes timeout on queries
             'read_timeout': 600,
             # default throw error on invalid UTF-8 results
             'unicode_error': 'strict',
             # SSL is disabled by default
             'ssl': False,
             # connection timeout is not enabled by default
             'connection_timeout': 5}

# simple connection, with manual close
connection = vertica_python.connect(**conn_info)
# do things
connection.close()

# using with for auto connection closing after usage
with vertica_python.connect(**conn_info) as connection:
    # do things
```

You can pass an `ssl.SSLContext` to `ssl` to customize the SSL connection options. For example,

```python
import vertica_python
import ssl

ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
ssl_context.verify_mode = ssl.CERT_REQUIRED
ssl_context.check_hostname = True
ssl_context.load_verify_locations(cafile='/path/to/ca_file.pem')

conn_info = {'host': '127.0.0.1',
             'port': 5433,
             'user': 'some_user',
             'password': 'some_password',
             'database': 'a_database',
             'ssl': ssl_context}
connection = vertica_python.connect(**conn_info)
```

See more on SSL options [here](https://docs.python.org/2/library/ssl.html).

**Stream query results**:

```python
cur = connection.cursor()
cur.execute("SELECT * FROM a_table LIMIT 2")

for row in cur.iterate():
    print(row)
# [ 1, 'some text', datetime.datetime(2014, 5, 18, 6, 47, 1, 928014) ]
# [ 2, 'something else', None ]
```

Streaming is recommended if you want to further process each row, save the results in a non-list/dict format (e.g. Pandas DataFrame), or save the results in a file.

**In-memory results as list**:

```python
cur = connection.cursor()
cur.execute("SELECT * FROM a_table LIMIT 2")
cur.fetchall()
# [ [1, 'something'], [2, 'something_else'] ]
```

**In-memory results as dictionary**:

```python
cur = connection.cursor('dict')
cur.execute("SELECT * FROM a_table LIMIT 2")
cur.fetchall()
# [ {'id': 1, 'value': 'something'}, {'id': 2, 'value': 'something_else'} ]
connection.close()
```

**Query using named parameters**:

```python
# Using named parameter bindings requires psycopg2>=2.5.1, which is not included
# with the base vertica_python requirements.

cur = connection.cursor()
cur.execute("SELECT * FROM a_table WHERE a = :propA AND b = :propB",
            {'propA': 1, 'propB': 'stringValue'})

cur.fetchall()
# [ [1, 'something'], [2, 'something_else'] ]
```

**Insert and commits**:

```python
cur = connection.cursor()

# inline commit
cur.execute("INSERT INTO a_table (a, b) VALUES (1, 'aa'); commit;")

# commit in execution
cur.execute("INSERT INTO a_table (a, b) VALUES (1, 'aa')")
cur.execute("INSERT INTO a_table (a, b) VALUES (2, 'bb')")
cur.execute("commit;")

# connection.commit()
cur.execute("INSERT INTO a_table (a, b) VALUES (1, 'aa')")
connection.commit()
```

**Copy**:

```python
cur = connection.cursor()
cur.copy("COPY test_copy (id, name) from stdin DELIMITER ',' ", csv)
```

Where `csv` is either a string or a file-like object (specifically, any object with a `read()` method). If using a file, the data is streamed.
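Because `copy()` accepts any object with a `read()` method, you can stream a file into Vertica without loading it into memory first. A minimal sketch; the file path is hypothetical and the table mirrors the string example above:

```python
# Stream a local CSV file into the table via COPY.
# '/tmp/test_copy.csv' is a hypothetical path; cur.copy() reads the file
# in chunks rather than loading it all into memory.
with open('/tmp/test_copy.csv', 'rb') as fs:
    cur.copy("COPY test_copy (id, name) FROM STDIN DELIMITER ','", fs)
```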
## Rowcount oddities

vertica_python behaves a bit differently than dbapi when returning rowcounts.

After a select execution, the rowcount will be -1, indicating that the row count is unknown. The rowcount value will be updated as data is streamed.

```python
cur.execute('SELECT 10 things')

cur.rowcount == -1  # indicates unknown rowcount

cur.fetchone()
cur.rowcount == 1
cur.fetchone()
cur.rowcount == 2
cur.fetchall()
cur.rowcount == 10
```

After an insert/update/delete, the rowcount will be returned as a single element row:

```python
cur.execute("DELETE 3 things")

cur.rowcount == -1  # indicates unknown rowcount
cur.fetchone()[0] == 3
```

## Nextset

If you execute multiple statements in a single call to execute(), you can use cursor.nextset() to retrieve all of the data.

```python
cur.execute('SELECT 1; SELECT 2;')

cur.fetchone()
# [1]
cur.fetchone()
# None

cur.nextset()
# True

cur.fetchone()
# [2]
cur.fetchone()
# None

cur.nextset()
# None
```

## UTF-8 encoding issues

While Vertica expects varchars stored to be UTF-8 encoded, sometimes invalid strings get into the database. You can specify how to handle reading these characters using the unicode_error connection option. This uses the same values as the unicode type (https://docs.python.org/2/library/functions.html#unicode)

```python
cur = vertica_python.Connection({..., 'unicode_error': 'strict'}).cursor()
cur.execute(r"SELECT E'\xC2'")
cur.fetchone()
# caught 'utf8' codec can't decode byte 0xc2 in position 0: unexpected end of data

cur = vertica_python.Connection({..., 'unicode_error': 'replace'}).cursor()
cur.execute(r"SELECT E'\xC2'")
cur.fetchone()
# �

cur = vertica_python.Connection({..., 'unicode_error': 'ignore'}).cursor()
cur.execute(r"SELECT E'\xC2'")
cur.fetchone()
#
```

## License

MIT License, please see `LICENSE` for details.

## Acknowledgements

Many thanks go to the contributors to the Ruby Vertica gem (https://github.com/sprsquish/vertica), since they did all of the wrestling with Vertica's protocol and have kept the gem updated. They are:

* [Matt Bauer](http://github.com/mattbauer)
* [Jeff Smick](http://github.com/sprsquish)
* [Willem van Bergen](http://github.com/wvanbergen)
* [Camilo Lopez](http://github.com/camilo)

vertica-python-0.7.3/Vagrantfile

# -*- mode: ruby -*-
# vi: set ft=ruby :

ENV["VAGRANT_DEFAULT_PROVIDER"] ||= "docker"

VAGRANTFILE_API_VERSION = "2"

#######################################################################
# This will set up a box with Vertica Community Edition 7.1.1
# running inside the box in a Docker container.
# # The purpose is to have a Vertica instance that can be used by tests. # # Vertica's port 5433 is exposed to host machine. # Database 'docker' is available. # User is 'dbadmin' with no password. # # >>> # ! As is, any data stored inside Vertica will not live through # ! container or VM restart. # >>> ####################################################################### Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| config.vm.provider "docker" do |d| d.image = "sumitchawla/vertica:latest" d.ports = ["5433:5433"] end config.vm.synced_folder ".", "/vagrant", disabled: true end vertica-python-0.7.3/setup.py000066400000000000000000000021541312155705000162100ustar00rootroot00000000000000#!/usr/bin/env python import collections from setuptools import setup, find_packages ReqOpts = collections.namedtuple('ReqOpts', ['skip_requirements_regex', 'default_vcs']) opts = ReqOpts(None, 'git') # version should use the format 'x.x.x' (instead of 'vx.x.x') setup( name='vertica-python', version='0.7.3', description='A native Python client for the Vertica database.', author='Justin Berka, Alex Kim', author_email='justin.berka@gmail.com, alex.kim@uber.com', url='https://github.com/uber/vertica-python/', keywords="database vertica", packages=find_packages(), license="MIT", install_requires=[ 'python-dateutil>=1.5', 'pytz', 'future', 'six>=1.10.0' ], extras_require={'namedparams': ['psycopg2>=2.5.1']}, classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Topic :: Database", "Topic :: Database :: Database Engines/Servers", "Operating System :: OS Independent" ] ) vertica-python-0.7.3/tox.ini000066400000000000000000000001661312155705000160120ustar00rootroot00000000000000[tox] envlist = py27,py34,py35,py36 [testenv] passenv = * commands = nosetests deps = nose==1.3.6 psycopg2>=2.5.1 vertica-python-0.7.3/vertica_python/000077500000000000000000000000001312155705000175325ustar00rootroot00000000000000vertica-python-0.7.3/vertica_python/__init__.py000066400000000000000000000023541312155705000216470ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from .vertica.connection import Connection, connect # Importing exceptions for compatibility with dbapi 2.0. # See: PEP 249 - Python Database API 2.0 # https://www.python.org/dev/peps/pep-0249/#exceptions from . import errors from .errors import ( Error, Warning, DataError, DatabaseError, IntegrityError, InterfaceError, InternalError, NotSupportedError, OperationalError, ProgrammingError) # Main module for this library. __author__ = 'Uber Technologies, Inc' __copyright__ = 'Copyright 2013, Uber Technologies, Inc.' __license__ = 'MIT' __all__ = ['Connection', 'PROTOCOL_VERSION', 'version_info', 'apilevel', 'threadsafety', 'paramstyle', 'connect', 'Error', 'Warning', 'DataError', 'DatabaseError', 'IntegrityError', 'InterfaceError', 'InternalError', 'NotSupportedError', 'OperationalError', 'ProgrammingError'] # The version number of this library. version_info = (0, 7, 3) __version__ = '.'.join(map(str, version_info)) # The protocol version (3.0.0) implemented in this library. PROTOCOL_VERSION = 3 << 16 apilevel = 2.0 threadsafety = 1 # Threads may share the module, but not connections! paramstyle = 'named' # WHERE name=:name vertica-python-0.7.3/vertica_python/compat.py000066400000000000000000000057671312155705000214060ustar00rootroot00000000000000# Copyright 2015 The TensorFlow Authors. All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Functions for Python 2 vs. 3 compatibility. ## Conversion routines In addition to the functions below, `as_str` converts an object to a `str`. @@as_bytes @@as_text @@as_str_any ## Types The compatibility module also provides the following types: * `bytes_or_text_types` * `complex_types` * `integral_types` * `real_types` """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import six as _six def as_bytes(bytes_or_text, encoding='utf-8'): """Converts either bytes or unicode to `bytes`, using utf-8 encoding for text. Args: bytes_or_text: A `bytes`, `str`, or `unicode` object. encoding: A string indicating the charset for encoding unicode. Returns: A `bytes` object. Raises: TypeError: If `bytes_or_text` is not a binary or unicode string. """ if isinstance(bytes_or_text, _six.text_type): return bytes_or_text.encode(encoding) elif isinstance(bytes_or_text, bytes): return bytes_or_text else: raise TypeError('Expected binary or unicode string, got %r' % (bytes_or_text,)) def as_text(bytes_or_text, encoding='utf-8'): """Returns the given argument as a unicode string. Args: bytes_or_text: A `bytes`, `str, or `unicode` object. encoding: A string indicating the charset for decoding unicode. Returns: A `unicode` (Python 2) or `str` (Python 3) object. Raises: TypeError: If `bytes_or_text` is not a binary or unicode string. """ if isinstance(bytes_or_text, _six.text_type): return bytes_or_text elif isinstance(bytes_or_text, bytes): return bytes_or_text.decode(encoding) else: raise TypeError('Expected binary or unicode string, got %r' % bytes_or_text) # Convert an object to a `str` in both Python 2 and 3. if _six.PY2: as_str = as_bytes else: as_str = as_text def as_str_any(value): """Converts to `str` as `str(value)`, but use `as_str` for `bytes`. Args: value: A object that can be converted to `str`. Returns: A `str` object. """ if isinstance(value, bytes): return as_str(value) else: return str(value) # Either bytes or text. 
bytes_or_text_types = (bytes, _six.text_type) _allowed_symbols = [ 'as_str', 'bytes_or_text_types', ] vertica-python-0.7.3/vertica_python/datatypes.py000066400000000000000000000020031312155705000220750ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from datetime import date, datetime, time # noinspection PyPep8Naming def Date(year, month, day): return date(year, month, day) # noinspection PyPep8Naming def Time(hour, minute, second): return time(hour, minute, second) # noinspection PyPep8Naming def Timestamp(year, month, day, hour, minute, second): return datetime(year, month, day, hour, minute, second) # noinspection PyPep8Naming def DateFromTicks(ticks): d = datetime.utcfromtimestamp(ticks) return d.date() # noinspection PyPep8Naming def TimeFromTicks(ticks): d = datetime.utcfromtimestamp(ticks) return d.time() # noinspection PyPep8Naming def TimestampFromTicks(ticks): d = datetime.utcfromtimestamp(ticks) return d.time() class Bytea(str): pass # noinspection PyPep8Naming def Binary(string): return Bytea(string) # vertica doesnt have a binary or row_id type i think STRING = 9 BINARY = 10000 NUMBER = 16 DATETIME = 12 ROWID = 10001 vertica-python-0.7.3/vertica_python/errors.py000066400000000000000000000052111312155705000214170ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import import re ############################################# # dbapi errors ############################################# class Error(Exception): pass # noinspection PyShadowingBuiltins class Warning(Exception): pass class InterfaceError(Error): pass class DatabaseError(Error): pass class InternalError(DatabaseError): pass class OperationalError(DatabaseError): pass class ProgrammingError(DatabaseError): pass class IntegrityError(DatabaseError): pass class DataError(DatabaseError): pass class NotSupportedError(DatabaseError): pass # # Other Errors # class TimedOutError(OperationalError): pass class ConnectionError(DatabaseError): pass class SSLNotSupported(ConnectionError): pass class MessageError(InternalError): pass class EmptyQueryError(ProgrammingError): pass class QueryError(ProgrammingError): def __init__(self, error_response, sql): self.error_response = error_response self.sql = sql ProgrammingError.__init__(self, "{0}, SQL: {1}".format(error_response.error_message(), repr(self.one_line_sql()))) def one_line_sql(self): if self.sql: return re.sub(r"[\r\n]+", ' ', self.sql) else: return '' @classmethod def from_error_response(cls, error_response, sql): klass = QUERY_ERROR_CLASSES.get(error_response.sqlstate, None) if klass is None: klass = cls return klass(error_response, sql) class LockFailure(QueryError): pass class InsufficientResources(QueryError): pass class OutOfMemory(QueryError): pass class VerticaSyntaxError(QueryError): pass class MissingSchema(QueryError): pass class MissingRelation(QueryError): pass class MissingColumn(QueryError): pass class CopyRejected(QueryError): pass class PermissionDenied(QueryError): pass class InvalidDatetimeFormat(QueryError): pass class DuplicateObject(QueryError): pass class QueryCanceled(QueryError): pass class ConnectionFailure(QueryError): pass QUERY_ERROR_CLASSES = { b'55V03': LockFailure, b'53000': InsufficientResources, b'53200': OutOfMemory, b'42601': VerticaSyntaxError, b'3F000': MissingSchema, b'42V01': MissingRelation, b'42703': MissingColumn, b'22V04': CopyRejected, b'42501': PermissionDenied, b'22007': InvalidDatetimeFormat, b'42710': DuplicateObject, b'57014': QueryCanceled, 
b'08006': ConnectionFailure } vertica-python-0.7.3/vertica_python/tests/000077500000000000000000000000001312155705000206745ustar00rootroot00000000000000vertica-python-0.7.3/vertica_python/tests/__init__.py000066400000000000000000000000001312155705000227730ustar00rootroot00000000000000vertica-python-0.7.3/vertica_python/tests/base.py000066400000000000000000000065451312155705000221720ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import import os import unittest from six import string_types from .. import * from ..compat import as_text, as_str, as_bytes DEFAULT_VP_TEST_HOST = '127.0.0.1' DEFAULT_VP_TEST_PORT = 5433 DEFAULT_VP_TEST_USER = 'dbadmin' DEFAULT_VP_TEST_PASSWD = '' DEFAULT_VP_TEST_DB = 'docker' DEFAULT_VP_TEST_TABLE = 'vertica_python_unit_test' class VerticaPythonTestCase(unittest.TestCase): """Base class for tests that query Vertica.""" @classmethod def setUpClass(cls): cls._host = os.getenv('VP_TEST_HOST', DEFAULT_VP_TEST_HOST) cls._port = int(os.getenv('VP_TEST_PORT', DEFAULT_VP_TEST_PORT)) cls._user = os.getenv('VP_TEST_USER', DEFAULT_VP_TEST_USER) cls._password = os.getenv('VP_TEST_PASSWD', DEFAULT_VP_TEST_PASSWD) cls._database = os.getenv('VP_TEST_DB', DEFAULT_VP_TEST_DB) cls._table = os.getenv('VP_TEST_TABLE', DEFAULT_VP_TEST_TABLE) cls._conn_info = { 'host': cls._host, 'port': cls._port, 'database': cls._database, 'user': cls._user, 'password': cls._password, } @classmethod def tearDownClass(cls): with cls._connect() as conn: cur = conn.cursor() cur.execute("DROP TABLE IF EXISTS {0}".format(cls._table)) @classmethod def _connect(cls): """Connects to vertica. :return: a connection to vertica. """ return connect(**cls._conn_info) def _query_and_fetchall(self, query): """Creates a new connection, executes a query and fetches all the results. :param query: query to execute :return: all fetched results as returned by cursor.fetchall() """ with self._connect() as conn: cur = conn.cursor() cur.execute(query) results = cur.fetchall() return results def _query_and_fetchone(self, query): """Creates a new connection, executes a query and fetches one result. 
:param query: query to execute :return: the first result fetched by cursor.fetchone() """ with self._connect() as conn: cur = conn.cursor() cur.execute(query) result = cur.fetchone() return result def assertTextEqual(self, first, second, msg=None): first_text = as_text(first) second_text = as_text(second) self.assertEqual(first=first_text, second=second_text, msg=msg) def assertStrEqual(self, first, second, msg=None): first_str = as_str(first) second_str = as_str(second) self.assertEqual(first=first_str, second=second_str, msg=msg) def assertBytesEqual(self, first, second, msg=None): first_bytes = as_bytes(first) second_bytes = as_bytes(second) self.assertEqual(first=first_bytes, second=second_bytes, msg=msg) def assertResultEqual(self, value, result, msg=None): if isinstance(value, string_types): self.assertTextEqual(first=value, second=result, msg=msg) else: self.assertEqual(first=value, second=result, msg=msg) def assertListOfListsEqual(self, list1, list2, msg=None): self.assertEqual(len(list1), len(list2), msg=msg) for l1, l2 in zip(list1, list2): self.assertListEqual(l1, l2, msg=msg) vertica-python-0.7.3/vertica_python/tests/test_column.py000066400000000000000000000011521312155705000236010ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from .base import VerticaPythonTestCase class ColumnTestCase(VerticaPythonTestCase): def test_column_names_query(self): columns = ['isocode', 'name'] with self._connect() as conn: cur = conn.cursor() cur.execute(""" SELECT 'US' AS {0}, 'United States' AS {1} UNION ALL SELECT 'CA', 'Canada' UNION ALL SELECT 'MX', 'Mexico' """.format(*columns)) description = cur.description self.assertListEqual([d.name for d in description], columns) vertica-python-0.7.3/vertica_python/tests/test_cursor.py000066400000000000000000000345251312155705000236330ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import import logging import tempfile from .base import VerticaPythonTestCase from .. 
import errors logger = logging.getLogger('vertica') class CursorTestCase(VerticaPythonTestCase): def setUp(self): self._init_table() def tearDown(self): # self._init_table() pass def _init_table(self): with self._connect() as conn: cur = conn.cursor() # clean old table cur.execute("DROP TABLE IF EXISTS {0}".format(self._table)) # create test table cur.execute("""CREATE TABLE {0} ( a INT, b VARCHAR(32) ) """.format(self._table)) def test_inline_commit(self): with self._connect() as conn: cur = conn.cursor() cur.execute( "INSERT INTO {0} (a, b) VALUES (1, 'aa'); COMMIT;".format(self._table)) cur.execute("SELECT a, b FROM {0} WHERE a = 1".format(self._table)) # unknown rowcount self.assertEqual(cur.rowcount, -1) res = cur.fetchall() self.assertEqual(cur.rowcount, 1) self.assertListOfListsEqual(res, [[1, 'aa']]) def test_multi_inserts_and_transaction(self): with self._connect() as conn1, self._connect() as conn2: cur1 = conn1.cursor() cur2 = conn2.cursor() # insert data without a commit cur1.execute("INSERT INTO {0} (a, b) VALUES (2, 'bb')".format(self._table)) # verify we can see it from this cursor cur1.execute("SELECT a, b FROM {0} WHERE a = 2".format(self._table)) res_from_cur_1_before_commit = cur1.fetchall() self.assertListOfListsEqual(res_from_cur_1_before_commit, [[2, 'bb']]) # verify we cant see it from other cursor cur2.execute("SELECT a, b FROM {0} WHERE a = 2".format(self._table)) res_from_cur2_before_commit = cur2.fetchall() self.assertListOfListsEqual(res_from_cur2_before_commit, []) # insert more data then commit cur1.execute("INSERT INTO {0} (a, b) VALUES (3, 'cc')".format(self._table)) cur1.execute("COMMIT") # verify we can see it from this cursor cur1.execute( "SELECT a, b FROM {0} WHERE a = 2 OR a = 3 ORDER BY a".format(self._table)) res_from_cur1_after_commit = cur1.fetchall() self.assertListOfListsEqual(res_from_cur1_after_commit, [[2, 'bb'], [3, 'cc']]) # verify we can see it from other cursor cur2.execute( "SELECT a, b FROM {0} WHERE a = 2 OR a = 3 ORDER BY a".format(self._table)) res_from_cur2_after_commit = cur2.fetchall() self.assertListOfListsEqual(res_from_cur2_after_commit, [[2, 'bb'], [3, 'cc']]) def test_conn_commit(self): with self._connect() as conn: cur = conn.cursor() cur.execute("INSERT INTO {0} (a, b) VALUES (5, 'cc')".format(self._table)) conn.commit() with self._connect() as conn: cur = conn.cursor() cur.execute("SELECT a, b FROM {0} WHERE a = 5".format(self._table)) res = cur.fetchall() self.assertListOfListsEqual(res, [[5, 'cc']]) def test_delete(self): with self._connect() as conn: cur = conn.cursor() cur.execute("INSERT INTO {0} (a, b) VALUES (5, 'cc')".format(self._table)) self.assertEqual(cur.rowcount, -1) update_res = cur.fetchall() self.assertListOfListsEqual(update_res, [[1]]) conn.commit() # validate delete count cur.execute("DELETE FROM {0} WHERE a = 5".format(self._table)) self.assertEqual(cur.rowcount, -1) delete_res = cur.fetchall() self.assertListOfListsEqual(delete_res, [[1]]) conn.commit() # validate deleted cur.execute("SELECT a, b FROM {0} WHERE a = 5".format(self._table)) res = cur.fetchall() self.assertListOfListsEqual(res, []) def test_update(self): with self._connect() as conn: cur = conn.cursor() cur.execute("INSERT INTO {0} (a, b) VALUES (5, 'cc')".format(self._table)) # validate insert count insert_res = cur.fetchall() self.assertListOfListsEqual(insert_res, [[1]], msg='Bad INSERT response') conn.commit() cur.execute("UPDATE {0} SET b = 'ff' WHERE a = 5".format(self._table)) # validate update count assert cur.rowcount == -1 
update_res = cur.fetchall() self.assertListOfListsEqual(update_res, [[1]], msg='Bad UPDATE response') conn.commit() cur.execute("SELECT a, b FROM {0} WHERE a = 5".format(self._table)) res = cur.fetchall() self.assertListOfListsEqual(res, [[5, 'ff']]) def test_copy_with_string(self): with self._connect() as conn1, self._connect() as conn2: cur1 = conn1.cursor() cur2 = conn2.cursor() cur1.copy("COPY {0} (a, b) FROM STDIN DELIMITER ','".format(self._table), "1,foo\n2,bar") # no commit necessary for copy cur1.execute("SELECT a, b FROM {0} WHERE a = 1".format(self._table)) res_from_cur1 = cur1.fetchall() self.assertListOfListsEqual(res_from_cur1, [[1, 'foo']]) cur2.execute("SELECT a, b FROM {0} WHERE a = 2".format(self._table)) res_from_cur2 = cur2.fetchall() self.assertListOfListsEqual(res_from_cur2, [[2, 'bar']]) def test_copy_with_file(self): f = tempfile.TemporaryFile() f.write(b"1,foo\n2,bar") # move rw pointer to top of file f.seek(0) with self._connect() as conn1, self._connect() as conn2: cur1 = conn1.cursor() cur2 = conn2.cursor() cur1.copy("COPY {0} (a, b) FROM STDIN DELIMITER ','".format(self._table), f) # no commit necessary for copy cur1.execute("SELECT a, b FROM {0} WHERE a = 1".format(self._table)) res_from_cur1 = cur1.fetchall() self.assertListOfListsEqual(res_from_cur1, [[1, 'foo']]) cur2.execute("SELECT a, b FROM {0} WHERE a = 2".format(self._table)) res_from_cur2 = cur2.fetchall() self.assertListOfListsEqual(res_from_cur2, [[2, 'bar']]) # unit test for #78 def test_copy_with_data_in_buffer(self): with self._connect() as conn: cur = conn.cursor() cur.execute("SELECT 1;") res = cur.fetchall() self.assertListOfListsEqual(res, [[1]]) cur.copy("COPY {0} (a, b) FROM STDIN DELIMITER ','".format(self._table), "1,foo\n2,bar") cur.execute("SELECT 1;") res = cur.fetchall() self.assertListOfListsEqual(res, [[1]]) def test_with_conn(self): with self._connect() as conn: cur = conn.cursor() cur.execute("INSERT INTO {0} (a, b) VALUES (1, 'aa'); COMMIT;".format(self._table)) cur.execute("SELECT a, b FROM {0} WHERE a = 1".format(self._table)) res = cur.fetchall() self.assertListOfListsEqual(res, [[1, 'aa']]) def test_iterator(self): with self._connect() as conn: cur = conn.cursor() values = [[1, 'aa'], [2, 'bb'], [3, 'cc']] for n, s in values: cur.execute("INSERT INTO {0} (a, b) VALUES (:n, :s)".format(self._table), {'n': n, 's': s}) conn.commit() cur.execute("SELECT a, b FROM {0} ORDER BY a ASC".format(self._table)) for val, res in zip(sorted(values), cur.iterate()): self.assertListEqual(res, val) remaining = cur.fetchall() self.assertListOfListsEqual(remaining, []) def test_mid_iterator_execution(self): with self._connect() as conn: cur = conn.cursor() values = [[1, 'aa'], [2, 'bb'], [3, 'cc']] for n, s in values: cur.execute("INSERT INTO {0} (a, b) VALUES (:n, :s)".format(self._table), {'n': n, 's': s}) conn.commit() cur.execute("SELECT a, b FROM {0} ORDER BY a ASC".format(self._table)) for val, res in zip(sorted(values), cur.iterate()): self.assertListEqual(res, val) break # stop after one comparison # make new query and verify result cur.execute("SELECT COUNT(*) FROM {0}".format(self._table)) res = cur.fetchall() self.assertListOfListsEqual(res, [[3]]) def test_query_errors(self): with self._connect() as conn: cur = conn.cursor() # create table syntax error with self.assertRaises(errors.VerticaSyntaxError): cur.execute("""CREATE TABLE {0}_fail ( a INT, b VARCHAR(32),,, ); """.format(self._table)) # select table not found error cur.execute("INSERT INTO {0} (a, b) VALUES (1, 'aa'); 
COMMIT;".format(self._table)) with self.assertRaises(errors.QueryError): cur.execute("SELECT * FROM {0}_fail".format(self._table)) # verify cursor still usable after errors cur.execute("SELECT a, b FROM {0} WHERE a = 1".format(self._table)) res = cur.fetchall() self.assertListOfListsEqual(res, [[1, 'aa']]) def test_cursor_close_and_reuse(self): with self._connect() as conn: cur = conn.cursor() # insert data cur.execute("INSERT INTO {0} (a, b) VALUES (2, 'bb'); COMMIT;".format(self._table)) # (query -> close -> reopen) * 3 times for _ in range(3): cur.execute("SELECT a, b FROM {0} WHERE a = 2".format(self._table)) res = cur.fetchall() self.assertListOfListsEqual(res, [[2, 'bb']]) # close and reopen cursor cur.close() cur = conn.cursor() # unit test for #74 def test_nextset(self): with self._connect() as conn: cur = conn.cursor() cur.execute("SELECT 1; SELECT 2;") res1 = cur.fetchall() self.assertListOfListsEqual(res1, [[1]]) self.assertIsNone(cur.fetchone()) self.assertTrue(cur.nextset()) res2 = cur.fetchall() self.assertListOfListsEqual(res2, [[2]]) self.assertIsNone(cur.fetchone()) self.assertFalse(cur.nextset()) # unit test for #74 def test_nextset_with_delete(self): with self._connect() as conn: cur = conn.cursor() # insert data cur.execute("INSERT INTO {0} (a, b) VALUES (1, 'aa')".format(self._table)) cur.execute("INSERT INTO {0} (a, b) VALUES (2, 'bb')".format(self._table)) conn.commit() cur.execute(""" SELECT * FROM {0} ORDER BY a ASC; DELETE FROM {0}; SELECT * FROM {0} ORDER BY a ASC; """.format(self._table)) # check first select results res1 = cur.fetchall() self.assertListOfListsEqual(res1, [[1, 'aa'], [2, 'bb']]) self.assertIsNone(cur.fetchone()) self.assertTrue(cur.nextset()) # check delete results res2 = cur.fetchall() self.assertListOfListsEqual(res2, [[2]]) self.assertIsNone(cur.fetchone()) self.assertTrue(cur.nextset()) # check second select results res3 = cur.fetchall() self.assertListOfListsEqual(res3, []) self.assertIsNone(cur.fetchone()) self.assertFalse(cur.nextset()) # unit test for #124 def test_nextset_with_error(self): with self._connect() as conn: cur = conn.cursor() cur.execute("SELECT 1; SELECT a; SELECT 2") # verify data from first query res1 = cur.fetchall() self.assertListOfListsEqual(res1, [[1]]) self.assertIsNone(cur.fetchone()) # second statement results in a query error with self.assertRaises(errors.MissingColumn): cur.nextset() # unit test for #144 def test_empty_query(self): with self._connect() as conn: cur = conn.cursor() cur.execute("") res = cur.fetchall() self.assertListOfListsEqual(res, []) class TestExecutemany(VerticaPythonTestCase): def setUp(self): self._init_table() def tearDown(self): # self._init_table() pass def _init_table(self): with self._connect() as conn: cur = conn.cursor() # clean old table cur.execute("DROP TABLE IF EXISTS {0}".format(self._table)) # create test table cur.execute("""CREATE TABLE {0} ( a INT, b VARCHAR(32) ) """.format(self._table)) def _test_executemany(self, table, seq_of_values): with self._connect() as conn: cur = conn.cursor() cur.executemany("INSERT INTO {0} (a, b) VALUES (%s, %s)".format(table), seq_of_values) conn.commit() cur.execute("SELECT * FROM {0} ORDER BY a ASC, b ASC".format(table)) # check first select results res1 = cur.fetchall() seq_of_values_to_compare = sorted([list(values) for values in seq_of_values]) self.assertListOfListsEqual(res1, seq_of_values_to_compare) self.assertIsNone(cur.fetchone()) def test_executemany(self): self._test_executemany(self._table, [(1, 'aa'), (2, 'bb')]) def 
test_executemany_quoted_path(self): table = '.'.join(['"{}"'.format(s.strip('"')) for s in self._table.split('.')]) self._test_executemany(table, [(1, 'aa'), (2, 'bb')]) vertica-python-0.7.3/vertica_python/tests/test_datatypes.py000066400000000000000000000014261312155705000243060ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from decimal import Decimal from .base import VerticaPythonTestCase class TypeTestCase(VerticaPythonTestCase): def test_decimal_query(self): value = Decimal(0.42) query = "SELECT {0}::numeric".format(value) res = self._query_and_fetchone(query) self.assertAlmostEqual(res[0], value) def test_boolean_query__true(self): value = True query = "SELECT {0}::boolean".format(value) res = self._query_and_fetchone(query) self.assertEqual(res[0], value) def test_boolean_query__false(self): value = False query = "SELECT {0}::boolean".format(value) res = self._query_and_fetchone(query) self.assertEqual(res[0], value) vertica-python-0.7.3/vertica_python/tests/test_dates.py000066400000000000000000000251571312155705000234170ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from collections import namedtuple from datetime import date, datetime from .base import VerticaPythonTestCase from .. import errors from ..vertica.column import timestamp_parse DateTestingCase = namedtuple("DateTestingCase", ["string", "template", "date"]) TimestampTestingCase = namedtuple("TimestampTestingCase", ["string", "timestamp"]) class DateParsingTestCase(VerticaPythonTestCase): """Testing DATE type parsing with focus on 'AD'/'BC'. Note: the 'BC' or 'AD' era indicators in Vertica's date format seem to make Vertica behave as follows: 1. Both 'BC' and 'AD' are simply a flags that tell Vertica: include era indicator if the date is Before Christ 2. 
Dates in 'AD' will never include era indicator """ def _test_dates(self, test_cases, msg=None): with self._connect() as conn: cur = conn.cursor() for tc in test_cases: cur.execute("SELECT TO_DATE('{0}', '{1}')".format(tc.string, tc.template)) res = cur.fetchall() self.assertListOfListsEqual(res, [[tc.date]], msg=msg) def _test_not_supported(self, test_cases, msg=None): with self._connect() as conn: cur = conn.cursor() for tc in test_cases: with self.assertRaises(errors.NotSupportedError, msg=msg): cur.execute("SELECT TO_DATE('{0}', '{1}')".format(tc.string, tc.template)) res = cur.fetchall() self.assertListOfListsEqual(res, [[tc.date]]) def test_no_to_no(self): test_cases = [ DateTestingCase('1985-10-25', 'YYYY-MM-DD', date(1985, 10, 25)), DateTestingCase('1955-11-12', 'YYYY-MM-DD', date(1955, 11, 12)), DateTestingCase('1885-01-01', 'YYYY-MM-DD', date(1885, 1, 1)), DateTestingCase('2015-10-21', 'YYYY-MM-DD', date(2015, 10, 21)), ] self._test_dates(test_cases=test_cases, msg='no indicator -> no indicator') def test_ad_to_no(self): test_cases = [ DateTestingCase('1985-10-25 AD', 'YYYY-MM-DD', date(1985, 10, 25)), DateTestingCase('1955-11-12 AD', 'YYYY-MM-DD', date(1955, 11, 12)), DateTestingCase('1885-01-01 AD', 'YYYY-MM-DD', date(1885, 1, 1)), DateTestingCase('2015-10-21 AD', 'YYYY-MM-DD', date(2015, 10, 21)), ] self._test_dates(test_cases=test_cases, msg='AD indicator -> no indicator') def test_bc_to_no(self): test_cases = [ DateTestingCase('1985-10-25 BC', 'YYYY-MM-DD', date(1985, 10, 25)), DateTestingCase('1955-11-12 BC', 'YYYY-MM-DD', date(1955, 11, 12)), DateTestingCase('1885-01-01 BC', 'YYYY-MM-DD', date(1885, 1, 1)), DateTestingCase('2015-10-21 BC', 'YYYY-MM-DD', date(2015, 10, 21)), ] self._test_dates(test_cases=test_cases, msg='BC indicator -> no indicator') def test_no_to_ad(self): test_cases = [ DateTestingCase('1985-10-25', 'YYYY-MM-DD AD', date(1985, 10, 25)), DateTestingCase('1955-11-12', 'YYYY-MM-DD AD', date(1955, 11, 12)), DateTestingCase('1885-01-01', 'YYYY-MM-DD AD', date(1885, 1, 1)), DateTestingCase('2015-10-21', 'YYYY-MM-DD AD', date(2015, 10, 21)), ] self._test_dates(test_cases=test_cases, msg='no indicator -> AD indicator') def test_ad_to_ad(self): test_cases = [ DateTestingCase('1985-10-25 AD', 'YYYY-MM-DD AD', date(1985, 10, 25)), DateTestingCase('1955-11-12 AD', 'YYYY-MM-DD AD', date(1955, 11, 12)), DateTestingCase('1885-01-01 AD', 'YYYY-MM-DD AD', date(1885, 1, 1)), DateTestingCase('2015-10-21 AD', 'YYYY-MM-DD AD', date(2015, 10, 21)), ] self._test_dates(test_cases=test_cases, msg='AD indicator -> AD indicator') def test_bc_to_ad(self): test_cases = [ DateTestingCase('1985-10-25 BC', 'YYYY-MM-DD AD', date(1985, 10, 25)), DateTestingCase('1955-11-12 BC', 'YYYY-MM-DD AD', date(1955, 11, 12)), DateTestingCase('1885-01-01 BC', 'YYYY-MM-DD AD', date(1885, 1, 1)), DateTestingCase('2015-10-21 BC', 'YYYY-MM-DD AD', date(2015, 10, 21)), ] self._test_not_supported(test_cases=test_cases, msg='BC indicator -> AD indicator') def test_no_to_bc(self): test_cases = [ DateTestingCase('1985-10-25', 'YYYY-MM-DD BC', date(1985, 10, 25)), DateTestingCase('1955-11-12', 'YYYY-MM-DD BC', date(1955, 11, 12)), DateTestingCase('1885-01-01', 'YYYY-MM-DD BC', date(1885, 1, 1)), DateTestingCase('2015-10-21', 'YYYY-MM-DD BC', date(2015, 10, 21)), ] self._test_dates(test_cases=test_cases, msg='no indicator -> BC indicator') def test_ad_to_bc(self): test_cases = [ DateTestingCase('1985-10-25 AD', 'YYYY-MM-DD BC', date(1985, 10, 25)), DateTestingCase('1955-11-12 AD', 'YYYY-MM-DD BC', date(1955, 
11, 12)), DateTestingCase('1885-01-01 AD', 'YYYY-MM-DD BC', date(1885, 1, 1)), DateTestingCase('2015-10-21 AD', 'YYYY-MM-DD BC', date(2015, 10, 21)), ] self._test_dates(test_cases=test_cases, msg='AD indicator -> BC indicator') def test_bc_to_bc(self): test_cases = [ DateTestingCase('1985-10-25 BC', 'YYYY-MM-DD BC', date(1985, 10, 25)), DateTestingCase('1955-11-12 BC', 'YYYY-MM-DD BC', date(1955, 11, 12)), DateTestingCase('1885-01-01 BC', 'YYYY-MM-DD BC', date(1885, 1, 1)), DateTestingCase('2015-10-21 BC', 'YYYY-MM-DD BC', date(2015, 10, 21)), ] self._test_not_supported(test_cases=test_cases, msg='BC indicator -> BC indicator') class TimestampParsingTestCase(VerticaPythonTestCase): def _test_timestamps(self, test_cases, msg=None): for tc in test_cases: self.assertEqual(timestamp_parse(tc.string), tc.timestamp, msg=msg) def test_timestamp_second_resolution(self): test_cases = [ # back to the future dates TimestampTestingCase( '1985-10-26 01:25:01', datetime(year=1985, month=10, day=26, hour=1, minute=25, second=1) ), TimestampTestingCase( '1955-11-12 22:55:02', datetime(year=1955, month=11, day=12, hour=22, minute=55, second=2) ), TimestampTestingCase( '2015-10-21 11:12:03', datetime(year=2015, month=10, day=21, hour=11, minute=12, second=3) ), TimestampTestingCase( '1885-01-01 01:02:04', datetime(year=1885, month=1, day=1, hour=1, minute=2, second=4) ), TimestampTestingCase( '1885-09-02 02:03:05', datetime(year=1885, month=9, day=2, hour=2, minute=3, second=5) ), ] self._test_timestamps(test_cases=test_cases, msg='timestamp second resolution') def test_timestamp_microsecond_resolution(self): test_cases = [ # back to the future dates TimestampTestingCase( '1985-10-26 01:25:01.1', datetime(year=1985, month=10, day=26, hour=1, minute=25, second=1, microsecond=100000) ), TimestampTestingCase( '1955-11-12 22:55:02.01', datetime(year=1955, month=11, day=12, hour=22, minute=55, second=2, microsecond=10000) ), TimestampTestingCase( '2015-10-21 11:12:03.001', datetime(year=2015, month=10, day=21, hour=11, minute=12, second=3, microsecond=1000) ), TimestampTestingCase( '1885-01-01 01:02:04.000001', datetime(year=1885, month=1, day=1, hour=1, minute=2, second=4, microsecond=1) ), TimestampTestingCase( '1885-09-02 02:03:05.002343', datetime(year=1885, month=9, day=2, hour=2, minute=3, second=5, microsecond=2343) ), ] self._test_timestamps(test_cases=test_cases, msg='timestamp microsecond resolution') def test_timestamp_year_over_9999_second_resolution(self): """Asserts that years over 9999 are truncated to 9999""" test_cases = [ TimestampTestingCase( '19850-10-26 01:25:01', datetime(year=9999, month=10, day=26, hour=1, minute=25, second=1) ), TimestampTestingCase( '10000-11-12 22:55:02', datetime(year=9999, month=11, day=12, hour=22, minute=55, second=2) ), TimestampTestingCase( '9999-10-21 11:12:03', datetime(year=9999, month=10, day=21, hour=11, minute=12, second=3) ), TimestampTestingCase( '18850-01-01 01:02:04', datetime(year=9999, month=1, day=1, hour=1, minute=2, second=4) ), TimestampTestingCase( '18850-09-02 02:03:05', datetime(year=9999, month=9, day=2, hour=2, minute=3, second=5) ), ] self._test_timestamps(test_cases=test_cases, msg='timestamp past 9999 second resolution') def test_timestamp_year_over_9999_microsecond_resolution(self): test_cases = [ TimestampTestingCase( '19850-10-26 01:25:01.1', datetime(year=9999, month=10, day=26, hour=1, minute=25, second=1, microsecond=100000) ), TimestampTestingCase( '10000-11-12 22:55:02.01', datetime(year=9999, month=11, day=12, hour=22, minute=55, 
second=2, microsecond=10000) ), TimestampTestingCase( '9999-10-21 11:12:03.001', datetime(year=9999, month=10, day=21, hour=11, minute=12, second=3, microsecond=1000) ), TimestampTestingCase( '18850-01-01 01:02:04.000001', datetime(year=9999, month=1, day=1, hour=1, minute=2, second=4, microsecond=1) ), TimestampTestingCase( '18850-09-02 02:03:05.002343', datetime(year=9999, month=9, day=2, hour=2, minute=3, second=5, microsecond=2343) ), ] self._test_timestamps(test_cases=test_cases, msg='timestamp past 9999 microsecond resolution') vertica-python-0.7.3/vertica_python/tests/test_errors.py000066400000000000000000000021371312155705000236240ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from .base import VerticaPythonTestCase from .. import errors class ErrorTestCase(VerticaPythonTestCase): def setUp(self): with self._connect() as conn: cur = conn.cursor() cur.execute("DROP TABLE IF EXISTS {0}".format(self._table)) def test_missing_schema(self): with self._connect() as conn: cur = conn.cursor() with self.assertRaises(errors.MissingSchema): cur.execute("SELECT 1 FROM missing_schema.table") def test_missing_relation(self): with self._connect() as conn: cur = conn.cursor() with self.assertRaises(errors.MissingRelation): cur.execute("SELECT 1 FROM missing_table") def test_duplicate_object(self): with self._connect() as conn: cur = conn.cursor() cur.execute("CREATE TABLE {0} (a BOOLEAN)".format(self._table)) with self.assertRaises(errors.DuplicateObject): cur.execute("CREATE TABLE {0} (a BOOLEAN)".format(self._table)) vertica-python-0.7.3/vertica_python/tests/test_timezones.py000066400000000000000000000037461312155705000243340ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from collections import namedtuple from datetime import datetime import pytz from .base import VerticaPythonTestCase TimeZoneTestingCase = namedtuple("TimeZoneTestingCase", ["string", "template", "timestamp"]) class TimeZoneTestCase(VerticaPythonTestCase): def _test_ts(self, test_cases): with self._connect() as conn: cur = conn.cursor() for tc in test_cases: cur.execute("SELECT TO_TIMESTAMP('{0}', '{1}')".format(tc.string, tc.template)) res = cur.fetchone() self.assertEqual(tc.timestamp.toordinal(), res[0].toordinal()) def test_simple_ts_query(self): template = 'YYYY-MM-DD HH:MI:SS.MS' test_cases = [ TimeZoneTestingCase( string='2016-05-15 13:15:17.789', template=template, timestamp=datetime(year=2016, month=5, day=15, hour=13, minute=15, second=17, microsecond=789000) ), ] self._test_ts(test_cases=test_cases) def test_simple_ts_with_tz_query(self): template = 'YYYY-MM-DD HH:MI:SS.MS TZ' test_cases = [ TimeZoneTestingCase( string='2016-05-15 13:15:17.789 UTC', template=template, timestamp=datetime(year=2016, month=5, day=15, hour=13, minute=15, second=17, microsecond=789000, tzinfo=pytz.utc) ), ] self._test_ts(test_cases=test_cases) def test_simple_ts_with_offset_query(self): template = 'YYYY-MM-DD HH:MI:SS.MS+00' test_cases = [ TimeZoneTestingCase( string='2016-05-15 13:15:17.789 UTC', template=template, timestamp=datetime(year=2016, month=5, day=15, hour=13, minute=15, second=17, microsecond=789000, tzinfo=pytz.utc) ), ] self._test_ts(test_cases=test_cases) vertica-python-0.7.3/vertica_python/tests/test_unicode.py000066400000000000000000000052451312155705000237410ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from .base import VerticaPythonTestCase class 
UnicodeTestCase(VerticaPythonTestCase): def test_unicode_query(self): value = u'\u16a0' query = u"SELECT '{0}'".format(value) with self._connect() as conn: cur = conn.cursor() cur.execute(query) res = cur.fetchone() self.assertResultEqual(value, res[0]) def test_unicode_list_parameter(self): values = [u'\u00f1', 'foo', 3] query = u"SELECT {0}".format(", ".join(["%s"] * len(values))) with self._connect() as conn: cur = conn.cursor() cur.execute(query, tuple(values)) results = cur.fetchone() for val, res in zip(values, results): self.assertResultEqual(val, res) def test_unicode_named_parameter_binding(self): values = [u'\u16b1', 'foo', 3] keys = [u'\u16a0', 'foo', 3] query = u"SELECT {0}".format(", ".join([u":{0}".format(key) for key in keys])) with self._connect() as conn: cur = conn.cursor() cur.execute(query, dict(zip(keys, values))) results = cur.fetchone() for val, res in zip(values, results): self.assertResultEqual(val, res) def test_string_query(self): value = u'test' query = u"SELECT '{0}'".format(value) with self._connect() as conn: cur = conn.cursor() cur.execute(query) res = cur.fetchone() self.assertEqual(value, res[0]) def test_string_named_parameter_binding(self): key = u'test' value = u'value' query = u"SELECT :{0}".format(key) with self._connect() as conn: cur = conn.cursor() cur.execute(query, {key: value}) res = cur.fetchone() self.assertResultEqual(value, res[0]) # unit test for issue #160 def test_null_named_parameter_binding(self): key = u'test' value = None query = u"SELECT :{0}".format(key) with self._connect() as conn: cur = conn.cursor() cur.execute(query, {key: value}) res = cur.fetchone() self.assertResultEqual(value, res[0]) # unit test for issue #160 def test_null_list_parameter(self): values = [u'\u00f1', 'foo', None] query = u"SELECT {0}".format(", ".join(["%s"] * len(values))) with self._connect() as conn: cur = conn.cursor() cur.execute(query, tuple(values)) results = cur.fetchone() for val, res in zip(values, results): self.assertResultEqual(val, res) vertica-python-0.7.3/vertica_python/vertica/000077500000000000000000000000001312155705000211675ustar00rootroot00000000000000vertica-python-0.7.3/vertica_python/vertica/__init__.py000066400000000000000000000000001312155705000232660ustar00rootroot00000000000000vertica-python-0.7.3/vertica_python/vertica/column.py000066400000000000000000000141171312155705000230420ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import import re from collections import namedtuple from datetime import date, datetime from decimal import Decimal import pytz # noinspection PyCompatibility,PyUnresolvedReferences from builtins import str from dateutil import parser from .. import errors from ..compat import as_str, as_text YEARS_RE = re.compile(r"^([0-9]+)-") UTF_8 = 'utf-8' # these methods are bad... # # a few timestamp with tz examples: # 2013-01-01 00:00:00 # 2013-01-01 00:00:00+00 # 2013-01-01 00:00:00.01+00 # 2013-01-01 00:00:00.00001+00 # # Vertica stores all data in UTC: # "TIMESTAMP WITH TIMEZONE (TIMESTAMPTZ) data is stored in GMT (UTC) by # converting data from the current local time zone to GMT." # Vertica fetches data in local timezone: # "When TIMESTAMPTZ data is used, data is converted back to use the current # local time zone" # If vertica boxes are on UTC, you should never have a non +00 offset (as # far as I can tell) ie. 
inserting '2013-01-01 00:00:00.01 EST' to a # timestamptz type stores: 2013-01-01 05:00:00.01+00 # select t AT TIMEZONE 'America/New_York' returns: 2012-12-31 19:00:00.01 def timestamp_parse(s): s = as_str(s) try: dt = _timestamp_parse(s) except ValueError: # Value error, year might be over 9999 year_match = YEARS_RE.match(s) if year_match: year = year_match.groups()[0] dt = _timestamp_parse_without_year(s[len(year) + 1:]) dt = dt.replace(year=min(int(year), 9999)) else: raise errors.DataError('Timestamp value not supported: %s' % s) return dt def _timestamp_parse(s): if len(s) == 19: return datetime.strptime(s, '%Y-%m-%d %H:%M:%S') return datetime.strptime(s, '%Y-%m-%d %H:%M:%S.%f') def _timestamp_parse_without_year(s): if len(s) == 14: return datetime.strptime(s, '%m-%d %H:%M:%S') return datetime.strptime(s, '%m-%d %H:%M:%S.%f') def timestamp_tz_parse(s): s = as_str(s) # if timezone is simply UTC... if s.endswith('+00'): # remove time zone ts = timestamp_parse(s[:-3].encode(encoding=UTF_8, errors='strict')) ts = ts.replace(tzinfo=pytz.UTC) return ts # other wise do a real parse (slower) return parser.parse(s) def date_parse(s): """ Parses value of a DATE type. :param s: string to parse into date :return: an instance of datetime.date :raises NotSupportedError when a date Before Christ is encountered """ s = as_str(s) if s.endswith(' BC'): raise errors.NotSupportedError('Dates Before Christ are not supported. Got: {0}'.format(s)) # Value error, year might be over 9999 return date(*map(lambda x: min(int(x), 9999), s.split('-'))) ColumnTuple = namedtuple('Column', ['name', 'type_code', 'display_size', 'internal_size', 'precision', 'scale', 'null_ok']) class Column(object): def __init__(self, col, unicode_error=None): self.name = col['name'].decode() self.type_code = col['data_type_oid'] self.display_size = None self.internal_size = col['data_type_size'] self.precision = None self.scale = None self.null_ok = None self.unicode_error = unicode_error self.data_type_conversions = Column._data_type_conversions(unicode_error=self.unicode_error) # WORKAROUND: Treat LONGVARCHAR as VARCHAR if self.type_code == 115: self.type_code = 9 # Mark type_code as unspecified if not within known data types if self.type_code >= len(self.data_type_conversions): self.type_code = 0 # self.props = ColumnTuple(col['name'], col['data_type_oid'], None, col['data_type_size'], # None, None, None) self.props = ColumnTuple(self.name, self.type_code, None, col['data_type_size'], None, None, None) # self.converter = self.data_type_conversions[col['data_type_oid']][1] self.converter = self.data_type_conversions[self.type_code][1] # things that are actually sent # self.name = col['name'] # self.data_type = self.data_type_conversions[col['data_type_oid']][0] # self.type_modifier = col['type_modifier'] # self.format = 'text' if col['format_code'] == 0 else 'binary' # self.table_oid = col['table_oid'] # self.attribute_number = col['attribute_number'] # self.size = col['data_type_size'] @classmethod def _data_type_conversions(cls, unicode_error=None): if unicode_error is None: unicode_error = 'strict' return [ ('unspecified', None), ('tuple', None), ('pos', None), ('record', None), ('unknown', None), ('bool', lambda s: 't' == str(s, encoding=UTF_8, errors=unicode_error)), ('integer', lambda s: int(s)), ('float', lambda s: float(s)), ('char', lambda s: str(s, encoding=UTF_8, errors=unicode_error)), ('varchar', lambda s: str(s, encoding=UTF_8, errors=unicode_error)), ('date', date_parse), ('time', None), ('timestamp', 
timestamp_parse), ('timestamp_tz', timestamp_tz_parse), ('interval', None), ('time_tz', None), ('numeric', lambda s: Decimal(str(s, encoding=UTF_8, errors=unicode_error))), ('bytea', None), ('rle_tuple', None), ] @classmethod def data_types(cls): return tuple([name for name, value in cls._data_type_conversions()]) def convert(self, s): if s is None: return return self.converter(s) if self.converter is not None else s def __str__(self): return as_str(str(self.props)) def __unicode__(self): return as_text(str(self.props)) def __repr__(self): return as_str(str(self.props)) def __iter__(self): for prop in self.props: yield prop def __getitem__(self, key): return self.props[key] vertica-python-0.7.3/vertica_python/vertica/connection.py000066400000000000000000000231171312155705000237040ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import import logging import socket import ssl from struct import unpack # noinspection PyCompatibility,PyUnresolvedReferences from builtins import str from six import raise_from from .. import errors from ..vertica import messages from ..vertica.cursor import Cursor from ..vertica.messages.message import BackendMessage, FrontendMessage from ..vertica.messages.frontend_messages import CancelRequest logger = logging.getLogger('vertica') ASCII = 'ascii' def connect(**kwargs): """Opens a new connection to a Vertica database.""" return Connection(kwargs) class Connection(object): def __init__(self, options=None): self.parameters = {} self.session_id = None self.backend_pid = None self.backend_key = None self.transaction_status = None self.socket = None options = options or {} self.options = {key: value for key, value in options.items() if value is not None} # we only support one cursor per connection self.options.setdefault('unicode_error', None) self._cursor = Cursor(self, None, unicode_error=self.options['unicode_error']) self.options.setdefault('port', 5433) self.options.setdefault('read_timeout', 600) self.startup_connection() def __enter__(self): return self def __exit__(self, type_, value, traceback): try: # if there's no outstanding transaction, we can simply close the connection if self.transaction_status in (None, 'in_transaction'): return if type_ is not None: self.rollback() else: self.commit() finally: self.close() ############################################# # dbapi methods ############################################# def close(self): try: self.write(messages.Terminate()) finally: self.close_socket() def cancel(self): if self.closed(): raise errors.ConnectionError('Connection is closed') self.write(CancelRequest(backend_pid=self.backend_pid, backend_key=self.backend_key)) def commit(self): if self.closed(): raise errors.ConnectionError('Connection is closed') cur = self.cursor() cur.execute('COMMIT;') def rollback(self): if self.closed(): raise errors.ConnectionError('Connection is closed') cur = self.cursor() cur.execute('ROLLBACK;') def cursor(self, cursor_type=None): if self.closed(): raise errors.ConnectionError('Connection is closed') if self._cursor.closed(): self._cursor._closed = False # let user change type if they want? 
self._cursor.cursor_type = cursor_type return self._cursor ############################################# # internal ############################################# def reset_values(self): self.parameters = {} self.session_id = None self.backend_pid = None self.backend_key = None self.transaction_status = None self.socket = None def _socket(self): if self.socket is not None: return self.socket host = self.options.get('host') port = self.options.get('port') connection_timeout = self.options.get('connection_timeout') raw_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) if connection_timeout is not None: raw_socket.settimeout(connection_timeout) raw_socket.connect((host, port)) ssl_options = self.options.get('ssl') if ssl_options is not None and ssl_options is not False: from ssl import CertificateError, SSLError raw_socket.sendall(messages.SslRequest().get_message()) response = raw_socket.recv(1) if response in ('S', b'S'): try: if isinstance(ssl_options, ssl.SSLContext): raw_socket = ssl_options.wrap_socket(raw_socket, server_hostname=host) else: raw_socket = ssl.wrap_socket(raw_socket) except CertificateError as e: raise_from(errors.ConnectionError, e) except SSLError as e: raise_from(errors.ConnectionError, e) else: raise errors.SSLNotSupported("SSL requested but not supported by server") self.socket = raw_socket return self.socket def ssl(self): return self.socket is not None and isinstance(self.socket, ssl.SSLSocket) def opened(self): return (self.socket is not None and self.backend_pid is not None and self.transaction_status is not None) def closed(self): return not self.opened() def write(self, message): if not isinstance(message, FrontendMessage): raise TypeError("invalid message: ({0})".format(message)) logger.debug('=> %s', message) try: for data in message.fetch_message(): try: self._socket().sendall(data) except Exception: logger.error("couldn't send message") raise except Exception as e: self.close_socket() if str(e) == 'unsupported authentication method: 9': raise errors.ConnectionError( 'Error during authentication. Your password might be expired.') else: # noinspection PyTypeChecker raise_from(errors.ConnectionError, e) def close_socket(self): try: if self.socket is not None: self._socket().close() finally: self.reset_values() def reset_connection(self): self.close() self.startup_connection() def read_message(self): try: type_ = self.read_bytes(1) size = unpack('!I', self.read_bytes(4))[0] if size < 4: raise errors.MessageError("Bad message size: {0}".format(size)) message = BackendMessage.from_type(type_, self.read_bytes(size - 4)) logger.debug('<= %s', message) return message except (SystemError, IOError) as e: self.close_socket() # noinspection PyTypeChecker raise_from(errors.ConnectionError, e) def process_message(self, message): if isinstance(message, messages.ErrorResponse): raise errors.ConnectionError(message.error_message()) elif isinstance(message, messages.NoticeResponse): if getattr(self, 'notice_handler', None) is not None: self.notice_handler(message) elif isinstance(message, messages.BackendKeyData): self.backend_pid = message.pid self.backend_key = message.key elif isinstance(message, messages.ParameterStatus): self.parameters[message.name] = message.value elif isinstance(message, messages.ReadyForQuery): self.transaction_status = message.transaction_status elif isinstance(message, messages.CommandComplete): # TODO: I'm not ever seeing this actually returned by vertica... 
# if vertica returns a row count, set the rowcount attribute in cursor # if hasattr(message, 'rows'): # self.cursor.rowcount = message.rows pass elif isinstance(message, messages.EmptyQueryResponse): pass elif isinstance(message, messages.CopyInResponse): pass else: raise errors.MessageError("Unhandled message: {0}".format(message)) # set last message self._cursor._message = message def __str__(self): safe_options = {key: value for key, value in self.options.items() if key != 'password'} s1 = "({0}\\.)?{0})" u"\\s*\\(\\s*(?P{0}(\\s*,\\s*{0})*)\\s*\\)" u"\\s+VALUES\\s*\\(\\s*(?P.*)\\s*\\)").format(RE_NAME) class Cursor(object): # NOTE: this is used in executemany and is here for pandas compatibility _insert_statement = re.compile(RE_BASIC_INSERT_STAT, re.U | re.I) def __init__(self, connection, cursor_type=None, unicode_error=None): self.connection = connection self.cursor_type = cursor_type self.unicode_error = unicode_error if unicode_error is not None else 'strict' self._closed = False self._message = None self.operation = None self.error = None # # dbapi properties # self.description = None self.rowcount = -1 self.arraysize = 1 ############################################# # supporting `with` statements ############################################# def __enter__(self): return self def __exit__(self, type_, value, traceback): self.close() ############################################# # dbapi methods ############################################# # noinspection PyMethodMayBeStatic def callproc(self, procname, parameters=None): raise errors.NotSupportedError('Cursor.callproc() is not implemented') def close(self): self._closed = True def cancel(self): if self.closed(): raise errors.Error('Cursor is closed') self.connection.close() def execute(self, operation, parameters=None): self.operation = as_text(operation) if self.closed(): raise errors.Error('Cursor is closed') self.flush_to_query_ready() if parameters: # TODO: quote = True for backward compatibility. see if should be False. 
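            # format_operation_with_parameters() accepts either a dict, whose
            # keys are substituted into named ":key" placeholders (matched with
            # a word-boundary regex), or a list/tuple interpolated into
            # "%s"-style placeholders. Illustration only:
            #   cur.execute("SELECT * FROM t WHERE id = :id", {'id': 1})
            #   cur.execute("SELECT * FROM t WHERE id = %s", (1,))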
operation = self.format_operation_with_parameters(operation, parameters) self.rowcount = -1 self.connection.write(messages.Query(operation)) # read messages until we hit an Error, DataRow or ReadyForQuery self._message = self.connection.read_message() while True: if isinstance(self._message, messages.ErrorResponse): raise errors.QueryError.from_error_response(self._message, operation) elif isinstance(self._message, messages.RowDescription): self.description = [Column(fd, self.unicode_error) for fd in self._message.fields] elif isinstance(self._message, messages.DataRow): break elif isinstance(self._message, messages.ReadyForQuery): break elif isinstance(self._message, messages.CommandComplete): break else: self.connection.process_message(self._message) self._message = self.connection.read_message() def executemany(self, operation, seq_of_parameters): operation = as_text(operation) if not isinstance(seq_of_parameters, (list, tuple)): raise TypeError("seq_of_parameters should be list/tuple") m = self._insert_statement.match(operation) if m: target = as_text(m.group('target')) variables = as_text(m.group('variables')) variables = ",".join([variable.strip().strip('"') for variable in variables.split(",")]) values = as_text(m.group('values')) values = ",".join([value.strip().strip('"') for value in values.split(",")]) seq_of_values = [self.format_operation_with_parameters(values, parameters, is_csv=True) for parameters in seq_of_parameters] data = "\n".join(seq_of_values) copy_statement = ( u"COPY {0} ({1}) FROM STDIN DELIMITER ',' ENCLOSED BY '\"' " u"ENFORCELENGTH ABORT ON ERROR").format(target, variables) self.copy(copy_statement, data) else: raise NotImplementedError( "executemany is implemented for simple INSERT statements only") def fetchone(self): while True: if isinstance(self._message, messages.DataRow): if self.rowcount == -1: self.rowcount = 1 else: self.rowcount += 1 row = self.row_formatter(self._message) # fetch next message self._message = self.connection.read_message() return row elif isinstance(self._message, messages.ReadyForQuery): return None elif isinstance(self._message, messages.CommandComplete): return None else: self.connection.process_message(self._message) self._message = self.connection.read_message() def iterate(self): row = self.fetchone() while row: yield row row = self.fetchone() def fetchmany(self, size=None): if not size: size = self.arraysize results = [] while True: row = self.fetchone() if not row: break results.append(row) if len(results) >= size: break return results def fetchall(self): return list(self.iterate()) def nextset(self): # skip any data for this set if exists self.flush_to_command_complete() if self._message is None: return False elif isinstance(self._message, messages.CommandComplete): # there might be another set, read next message to find out self._message = self.connection.read_message() if isinstance(self._message, messages.RowDescription): # next row will be either a DataRow or CommandComplete self._message = self.connection.read_message() return True elif isinstance(self._message, messages.ReadyForQuery): return False elif isinstance(self._message, messages.ErrorResponse): raise errors.QueryError.from_error_response(self._message, self.operation) else: raise errors.Error( 'Unexpected nextset() state after CommandComplete: {0}'.format(self._message)) elif isinstance(self._message, messages.ReadyForQuery): # no more sets left to be read return False else: raise errors.Error('Unexpected nextset() state: {0}'.format(self._message)) def 
setinputsizes(self, sizes): pass def setoutputsize(self, size, column=None): pass ############################################# # non-dbapi methods ############################################# def flush_to_query_ready(self): # if the last message isn't empty or ReadyForQuery, read all remaining messages if self._message is None \ or isinstance(self._message, messages.ReadyForQuery): return while True: message = self.connection.read_message() if isinstance(message, messages.ReadyForQuery): self.connection.transaction_status = message.transaction_status self._message = message break def flush_to_command_complete(self): # if the last message isn't empty or CommandComplete, read messages until it is if self._message is None or isinstance(self._message, (messages.ReadyForQuery, messages.CommandComplete)): return while True: message = self.connection.read_message() if isinstance(message, messages.CommandComplete): self._message = message break def copy(self, sql, data, **kwargs): """ EXAMPLE: >> with open("/tmp/file.csv", "rb") as fs: >> cursor.copy("COPY table(field1,field2) FROM STDIN DELIMITER ',' ENCLOSED BY ''''", >> fs, buffer_size=65536) """ sql = as_text(sql) if self.closed(): raise errors.Error('Cursor is closed') self.flush_to_query_ready() if isinstance(data, binary_type): stream = BytesIO(data) elif isinstance(data, text_type): stream = StringIO(data) elif isinstance(data, file_type): stream = data else: raise TypeError("Not valid type of data {0}".format(type(data))) self.connection.write(messages.Query(sql)) while True: message = self.connection.read_message() if isinstance(message, messages.ErrorResponse): raise errors.QueryError.from_error_response(message, sql) self.connection.process_message(message=message) if isinstance(message, messages.ReadyForQuery): break elif isinstance(message, messages.CopyInResponse): self.connection.write(messages.CopyStream(stream, **kwargs)) self.connection.write(messages.CopyDone()) if self.error is not None: raise self.error def closed(self): return self._closed or self.connection.closed() ############################################# # internal ############################################# def row_formatter(self, row_data): if self.cursor_type is None: return self.format_row_as_array(row_data) elif self.cursor_type in (list, 'list'): return self.format_row_as_array(row_data) elif self.cursor_type in (dict, 'dict'): return self.format_row_as_dict(row_data) else: raise TypeError('Unrecognized cursor_type: {0}'.format(self.cursor_type)) def format_row_as_dict(self, row_data): return OrderedDict( (self.description[idx].name, self.description[idx].convert(value)) for idx, value in enumerate(row_data.values) ) def format_row_as_array(self, row_data): return [self.description[idx].convert(value) for idx, value in enumerate(row_data.values)] # noinspection PyArgumentList def format_operation_with_parameters(self, operation, parameters, is_csv=False): operation = as_text(operation) if isinstance(parameters, dict): for key, param in six.iteritems(parameters): if not isinstance(key, string_types): key = str(key) key = as_text(key) if isinstance(param, string_types): param = self.format_quote(as_text(param), is_csv) elif param is None: param = NULL else: param = str(param) value = as_text(param) # Using a regex with word boundary to correctly handle params with similar names # such as :s and :start match_str = u":{0}\\b".format(key) operation = re.sub(match_str, value, operation, flags=re.U) elif isinstance(parameters, (tuple, list)): tlist = [] for param 
in parameters: if isinstance(param, string_types): param = self.format_quote(as_text(param), is_csv) elif param is None: param = NULL else: param = str(param) value = as_text(param) tlist.append(value) operation = operation % tuple(tlist) else: raise errors.Error("Argument 'parameters' must be dict or tuple") return operation def format_quote(self, param, is_csv): # TODO Make sure adapt() behaves properly if is_csv: return '"{0}"'.format(re.escape(param)) else: return QuotedString(param.encode(UTF_8, self.unicode_error)).getquoted() vertica-python-0.7.3/vertica_python/vertica/messages/000077500000000000000000000000001312155705000227765ustar00rootroot00000000000000vertica-python-0.7.3/vertica_python/vertica/messages/__init__.py000066400000000000000000000004511312155705000251070ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from ..messages import backend_messages from ..messages.backend_messages import * from ..messages import frontend_messages from ..messages.frontend_messages import * __all__ = backend_messages.__all__ + frontend_messages.__all__ vertica-python-0.7.3/vertica_python/vertica/messages/backend_messages/000077500000000000000000000000001312155705000262545ustar00rootroot00000000000000vertica-python-0.7.3/vertica_python/vertica/messages/backend_messages/__init__.py000066400000000000000000000022401312155705000303630ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from .authentication import Authentication from .backend_key_data import BackendKeyData from .bind_complete import BindComplete from .close_complete import CloseComplete from .command_complete import CommandComplete from .copy_in_response import CopyInResponse from .data_row import DataRow from .empty_query_response import EmptyQueryResponse from .error_response import ErrorResponse from .no_data import NoData from .notice_response import NoticeResponse from .parameter_description import ParameterDescription from .parameter_status import ParameterStatus from .parse_complete import ParseComplete from .portal_suspended import PortalSuspended from .ready_for_query import ReadyForQuery from .row_description import RowDescription from .unknown import Unknown __all__ = ['RowDescription', 'ReadyForQuery', 'PortalSuspended', 'ParseComplete', 'ParameterStatus', 'NoticeResponse', 'NoData', 'ErrorResponse', 'EmptyQueryResponse', 'DataRow', 'CopyInResponse', 'CommandComplete', 'CloseComplete', 'BindComplete', 'Authentication', 'BackendKeyData', 'ParameterDescription', 'Unknown'] vertica-python-0.7.3/vertica_python/vertica/messages/backend_messages/authentication.py000066400000000000000000000014141312155705000316450ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from struct import unpack from ..message import BackendMessage class Authentication(BackendMessage): message_id = b'R' OK = 0 KERBEROS_V5 = 2 CLEARTEXT_PASSWORD = 3 CRYPT_PASSWORD = 4 MD5_PASSWORD = 5 SCM_CREDENTIAL = 6 GSS = 7 GSS_CONTINUE = 8 SSPI = 9 def __init__(self, data): BackendMessage.__init__(self) unpacked = unpack('!I{0}s'.format(len(data) - 4), data) self.code = unpacked[0] other = unpacked[1::][0] if self.code in [self.CRYPT_PASSWORD, self.MD5_PASSWORD]: self.salt = other if self.code in [self.GSS_CONTINUE]: self.auth_data = other BackendMessage.register(Authentication) 
vertica-python-0.7.3/vertica_python/vertica/messages/backend_messages/backend_key_data.py000066400000000000000000000006241312155705000320600ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from struct import unpack from ..message import BackendMessage class BackendKeyData(BackendMessage): message_id = b'K' def __init__(self, data): BackendMessage.__init__(self) unpacked = unpack('!2I', data) self.pid = unpacked[0] self.key = unpacked[1] BackendMessage.register(BackendKeyData) vertica-python-0.7.3/vertica_python/vertica/messages/backend_messages/bind_complete.py000066400000000000000000000003131312155705000314270ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from ..message import BackendMessage class BindComplete(BackendMessage): message_id = b'2' BackendMessage.register(BindComplete) vertica-python-0.7.3/vertica_python/vertica/messages/backend_messages/close_complete.py000066400000000000000000000003151312155705000316220ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from ..message import BackendMessage class CloseComplete(BackendMessage): message_id = b'3' BackendMessage.register(CloseComplete) vertica-python-0.7.3/vertica_python/vertica/messages/backend_messages/command_complete.py000066400000000000000000000016571312155705000321450ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import import re from struct import unpack from ..message import BackendMessage class CommandComplete(BackendMessage): message_id = b'C' def __init__(self, data): BackendMessage.__init__(self) data = unpack('{0}sx'.format(len(data) - 1), data)[0] if re.match(b"INSERT", data) is not None: splitstr = data.split(b' ', 3) self.tag = splitstr[0] if len(splitstr) >= 2: self.oid = int(splitstr[1]) if len(splitstr) >= 3: self.rows = int(splitstr[2]) elif re.match(b"(DELETE|UPDATE|MOVE|FETCH|COPY)", data) is not None: splitstr = data.split(b' ', 2) self.tag = splitstr[0] if len(splitstr) >= 2: self.rows = int(splitstr[1]) else: self.tag = data BackendMessage.register(CommandComplete) vertica-python-0.7.3/vertica_python/vertica/messages/backend_messages/copy_in_response.py000066400000000000000000000006761312155705000322150ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from struct import unpack from ..message import BackendMessage class CopyInResponse(BackendMessage): message_id = b'G' def __init__(self, data): BackendMessage.__init__(self) values = unpack('!B{0}H'.format((len(data) - 1) // 2), data) self.format = values[0] self.column_formats = values[2::] BackendMessage.register(CopyInResponse) vertica-python-0.7.3/vertica_python/vertica/messages/backend_messages/data_row.py000066400000000000000000000014201312155705000304230ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from struct import unpack, unpack_from from six.moves import range from ..message import BackendMessage class DataRow(BackendMessage): message_id = b'D' def __init__(self, data): BackendMessage.__init__(self) self.values = [] field_count = unpack('!H', data[0:2])[0] pos = 2 for i in range(field_count): size = unpack_from('!I', data, pos)[0] if size == 4294967295: size = -1 if size == -1: self.values.append(None) else: self.values.append(unpack_from('{0}s'.format(size), data, pos + 4)[0]) pos += (4 + max(size, 0)) BackendMessage.register(DataRow) 
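# Worked example of the wire format parsed by DataRow above (illustrative
# bytes, not captured from a server):
#   b'\x00\x02'                  -> field_count == 2
#   b'\x00\x00\x00\x03' b'foo'   -> 4-byte size 3, value b'foo'
#   b'\xff\xff\xff\xff'          -> size 4294967295, treated as -1, i.e. NULL
# giving self.values == [b'foo', None]; conversion to Python types happens
# later in Column.convert(), not here.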
vertica-python-0.7.3/vertica_python/vertica/messages/backend_messages/empty_query_response.py000066400000000000000000000004721312155705000331320ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from ..message import BackendMessage class EmptyQueryResponse(BackendMessage): message_id = b'I' def __init__(self, data=None): BackendMessage.__init__(self) self.data = data BackendMessage.register(EmptyQueryResponse) vertica-python-0.7.3/vertica_python/vertica/messages/backend_messages/error_response.py000066400000000000000000000004711312155705000316770ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from ..message import BackendMessage from vertica_python.vertica.messages.backend_messages.notice_response import NoticeResponse class ErrorResponse(NoticeResponse, BackendMessage): message_id = b'E' BackendMessage.register(ErrorResponse) vertica-python-0.7.3/vertica_python/vertica/messages/backend_messages/no_data.py000066400000000000000000000002771312155705000302410ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from ..message import BackendMessage class NoData(BackendMessage): message_id = b'n' BackendMessage.register(NoData) vertica-python-0.7.3/vertica_python/vertica/messages/backend_messages/notice_response.py000066400000000000000000000044251312155705000320320ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from struct import unpack_from from ..message import BackendMessage class NoticeResponse(BackendMessage): message_id = b'N' FIELDS_DEFINITIONS = [ {'type': b'q', 'name': "Internal Query", 'method': 'internal_query'}, {'type': b'S', 'name': "Severity", 'method': 'severity'}, {'type': b'M', 'name': "Message", 'method': 'message'}, {'type': b'C', 'name': "Sqlstate", 'method': 'sqlstate'}, {'type': b'D', 'name': "Detail", 'method': 'detail'}, {'type': b'H', 'name': "Hint", 'method': 'hint'}, {'type': b'P', 'name': "Position", 'method': 'position'}, {'type': b'W', 'name': "Where", 'method': 'where'}, {'type': b'p', 'name': "Internal Position", 'method': 'internal_position'}, {'type': b'R', 'name': "Routine", 'method': 'routine'}, {'type': b'F', 'name': "File", 'method': 'file'}, {'type': b'L', 'name': "Line", 'method': 'line'} ] def __init__(self, data): BackendMessage.__init__(self) self.values = {} pos = 0 while pos < len(data) - 1: null_byte = data.find(b'\x00', pos) # This will probably work unpacked = unpack_from('c{0}sx'.format(null_byte - 1 - pos), data, pos) key = unpacked[0] value = unpacked[1] self.values[self.fields()[key]] = value pos += (len(value) + 2) # May want to break out into a function at some point for field_def in self.FIELDS_DEFINITIONS: if self.values.get(field_def['name'], None) is not None: setattr(self, field_def['method'], self.values[field_def['name']]) def fields(self): # was FIELDS before # TODO verify that it doesn't break anything pairs = [] for field in self.FIELDS_DEFINITIONS: pairs.append((field['type'], field['name'])) return dict(pairs) def error_message(self): ordered = [] for field in self.FIELDS_DEFINITIONS: if self.values.get(field['name']) is not None: ordered.append("{0}: {1}".format(field['name'], self.values[field['name']])) return ', '.join(ordered) BackendMessage.register(NoticeResponse) vertica-python-0.7.3/vertica_python/vertica/messages/backend_messages/parameter_description.py000066400000000000000000000011261312155705000332110ustar00rootroot00000000000000from __future__ 
import print_function, division, absolute_import from struct import unpack, unpack_from from ..message import BackendMessage from vertica_python.vertica.column import Column class ParameterDescription(BackendMessage): message_id = b't' def __init__(self, data): BackendMessage.__init__(self) parameter_count = unpack('!H', data)[0] parameter_type_ids = unpack_from("!{0}N".format(parameter_count), data, 2) self.parameter_types = [Column.data_types()[dtid] for dtid in parameter_type_ids] BackendMessage.register(ParameterDescription) vertica-python-0.7.3/vertica_python/vertica/messages/backend_messages/parameter_status.py000066400000000000000000000007701312155705000322150ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from struct import unpack from ..message import BackendMessage class ParameterStatus(BackendMessage): message_id = b'S' def __init__(self, data): BackendMessage.__init__(self) null_byte = data.find(b'\x00') unpacked = unpack('{0}sx{1}sx'.format(null_byte - 1, len(data) - null_byte - 1), data) self.name = unpacked[0] self.value = unpacked[1] BackendMessage.register(ParameterStatus) vertica-python-0.7.3/vertica_python/vertica/messages/backend_messages/parse_complete.py000066400000000000000000000003151312155705000316270ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from ..message import BackendMessage class ParseComplete(BackendMessage): message_id = b'1' BackendMessage.register(ParseComplete) vertica-python-0.7.3/vertica_python/vertica/messages/backend_messages/portal_suspended.py000066400000000000000000000003211312155705000321750ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from ..message import BackendMessage class PortalSuspended(BackendMessage): message_id = b's' BackendMessage.register(PortalSuspended) vertica-python-0.7.3/vertica_python/vertica/messages/backend_messages/ready_for_query.py000066400000000000000000000007561312155705000320350ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from struct import unpack from ..message import BackendMessage class ReadyForQuery(BackendMessage): message_id = b'Z' STATUSES = { b'I': 'no_transaction', b'T': 'in_transaction', b'E': 'failed_transaction' } def __init__(self, data): BackendMessage.__init__(self) self.transaction_status = self.STATUSES[unpack('c', data)[0]] BackendMessage.register(ReadyForQuery) vertica-python-0.7.3/vertica_python/vertica/messages/backend_messages/row_description.py000066400000000000000000000020051312155705000320350ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from struct import unpack, unpack_from from six.moves import range from ..message import BackendMessage class RowDescription(BackendMessage): message_id = b'T' def __init__(self, data): BackendMessage.__init__(self) self.fields = [] field_count = unpack('!H', data[0:2])[0] pos = 2 for i in range(field_count): field_info = unpack_from("!{0}sxIHIHIH".format(data.find(b'\x00', pos) - pos), data, pos) self.fields.append({ 'name': field_info[0], 'table_oid': field_info[1], 'attribute_number': field_info[2], 'data_type_oid': field_info[3], 'data_type_size': field_info[4], 'type_modifier': field_info[5], 'format_code': field_info[6], }) pos += 19 + len(field_info[0]) BackendMessage.register(RowDescription) 
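# Each field dict built above is later wrapped in a Column (see
# vertica_python/vertica/column.py). A parsed entry looks roughly like this
# (values are hypothetical):
#   {'name': b'id', 'table_oid': 45035996273705046, 'attribute_number': 1,
#    'data_type_oid': 6, 'data_type_size': 8, 'type_modifier': 8,
#    'format_code': 0}
# where data_type_oid indexes Column._data_type_conversions() (6 -> 'integer')
# and format_code 0 means the value arrives in text format.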
vertica-python-0.7.3/vertica_python/vertica/messages/backend_messages/unknown.py000066400000000000000000000005401312155705000303240ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from ..message import BackendMessage class Unknown(BackendMessage): def __init__(self, message_id, data): BackendMessage.__init__(self) self._message_id = message_id self.data = data @property def message_id(self): return self._message_id vertica-python-0.7.3/vertica_python/vertica/messages/frontend_messages/000077500000000000000000000000001312155705000265045ustar00rootroot00000000000000vertica-python-0.7.3/vertica_python/vertica/messages/frontend_messages/__init__.py000066400000000000000000000014411312155705000306150ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from .bind import Bind from .cancel_request import CancelRequest from .close import Close from .copy_data import CopyData from .copy_stream import CopyStream from .copy_done import CopyDone from .copy_fail import CopyFail from .describe import Describe from .execute import Execute from .flush import Flush from .parse import Parse from .password import Password from .query import Query from .ssl_request import SslRequest from .startup import Startup from .sync import Sync from .terminate import Terminate __all__ = ['Bind', 'Query', 'CancelRequest', 'Close', 'CopyData', 'CopyDone', 'CopyFail', 'CopyStream', 'Describe', 'Execute', 'Flush', 'Parse', 'Terminate', 'Password', 'SslRequest', 'Startup', 'Sync'] vertica-python-0.7.3/vertica_python/vertica/messages/frontend_messages/bind.py000066400000000000000000000017241312155705000277760ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from struct import pack from ..message import BulkFrontendMessage class Bind(BulkFrontendMessage): message_id = b'B' def __init__(self, portal_name, prepared_statement_name, parameter_values): BulkFrontendMessage.__init__(self) self._portal_name = portal_name self._prepared_statement_name = prepared_statement_name self._parameter_values = parameter_values def read_bytes(self): bytes_ = pack('!{0}sx{1}sxHH'.format( len(self._portal_name), len(self._prepared_statement_name)), self._portal_name, self._prepared_statement_name, 0, len(self._parameter_values)) for val in self._parameter_values.values(): if val is None: bytes_ += pack('!I', [-1]) else: bytes_ += pack('!I{0}s'.format(len(val)), len(val), val) bytes_ += pack('!H', [0]) return bytes_ vertica-python-0.7.3/vertica_python/vertica/messages/frontend_messages/cancel_request.py000066400000000000000000000007631312155705000320610ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from struct import pack from ..message import BulkFrontendMessage class CancelRequest(BulkFrontendMessage): message_id = None def __init__(self, backend_pid, backend_key): BulkFrontendMessage.__init__(self) self._backend_pid = backend_pid self._backend_key = backend_key def read_bytes(self): bytes_ = pack('!3I', 80877102, self._backend_pid, self._backend_key) return bytes_ vertica-python-0.7.3/vertica_python/vertica/messages/frontend_messages/close.py000066400000000000000000000014341312155705000301650ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from struct import pack from ..message import BulkFrontendMessage class Close(BulkFrontendMessage): message_id = b'C' def __init__(self, close_type, close_name): 
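        # close_type selects which backend object to close: 'portal' maps to
        # type byte 'P', 'prepared_statement' to 'S'; any other value raises
        # ValueError below.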
BulkFrontendMessage.__init__(self) self._close_name = close_name if close_type == 'portal': self._close_type = 'P' elif close_type == 'prepared_statement': self._close_type = 'S' else: raise ValueError("{0} is not a valid close_type. " "Must be either portal or prepared_statement".format(close_type)) def read_bytes(self): bytes_ = pack('c{0}sx'.format(len(self._close_name)), self._close_type, self._close_name) return bytes_ vertica-python-0.7.3/vertica_python/vertica/messages/frontend_messages/copy_data.py000066400000000000000000000013641312155705000310250ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from six import text_type, binary_type from ..message import BulkFrontendMessage UTF_8 = 'utf-8' class CopyData(BulkFrontendMessage): message_id = b'd' def __init__(self, data, unicode_error='strict'): BulkFrontendMessage.__init__(self) self._unicode_error = unicode_error if isinstance(data, text_type): self._data = self._data.encode(encoding=UTF_8, errors=self._unicode_error) elif isinstance(data, binary_type): self._data = data else: raise TypeError("should be string or bytes") def read_bytes(self): # to deal with unicode strings bytes_ = self._data return bytes_ vertica-python-0.7.3/vertica_python/vertica/messages/frontend_messages/copy_done.py000066400000000000000000000002521312155705000310340ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from ..message import BulkFrontendMessage class CopyDone(BulkFrontendMessage): message_id = b'c' vertica-python-0.7.3/vertica_python/vertica/messages/frontend_messages/copy_fail.py000066400000000000000000000007071312155705000310270ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from struct import pack from ..message import BulkFrontendMessage class CopyFail(BulkFrontendMessage): message_id = b'f' def __init__(self, error_message): BulkFrontendMessage.__init__(self) self._error_message = error_message def read_bytes(self): bytes_ = pack('{0}sx'.format(len(self._error_message)), self._error_message) return bytes_ vertica-python-0.7.3/vertica_python/vertica/messages/frontend_messages/copy_stream.py000066400000000000000000000017221312155705000314050ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from six import text_type, binary_type from ..message import StreamFrontendMessage DEFAULT_BUFFER_SIZE = 131072 UTF_8 = 'utf-8' class CopyStream(StreamFrontendMessage): message_id = b'd' def __init__(self, stream, buffer_size=DEFAULT_BUFFER_SIZE, unicode_error='strict'): StreamFrontendMessage.__init__(self) self._stream = stream self._unicode_error = unicode_error self._buffer_size = buffer_size def stream_bytes(self): while True: chunk = self._stream.read(self._buffer_size) if isinstance(chunk, text_type): bytes_ = chunk.encode(encoding=UTF_8, errors=self._unicode_error) elif isinstance(chunk, binary_type): bytes_ = chunk else: raise TypeError("should be string or bytes") if not chunk: break yield bytes_ vertica-python-0.7.3/vertica_python/vertica/messages/frontend_messages/crypt_windows.py000077500000000000000000000206031312155705000317750ustar00rootroot00000000000000#!/usr/bin/env python from __future__ import print_function, division, absolute_import from six.moves import range # Initial permutation IP = ( 58, 50, 42, 34, 26, 18, 10, 2, 60, 52, 44, 36, 28, 20, 12, 4, 62, 54, 46, 38, 30, 22, 14, 6, 64, 56, 48, 40, 32, 24, 16, 8, 57, 49, 41, 33, 25, 17, 9, 1, 59, 51, 43, 35, 27, 19, 
11, 3, 61, 53, 45, 37, 29, 21, 13, 5, 63, 55, 47, 39, 31, 23, 15, 7, ) # Final permutation, FP = IP^(-1) FP = ( 40, 8, 48, 16, 56, 24, 64, 32, 39, 7, 47, 15, 55, 23, 63, 31, 38, 6, 46, 14, 54, 22, 62, 30, 37, 5, 45, 13, 53, 21, 61, 29, 36, 4, 44, 12, 52, 20, 60, 28, 35, 3, 43, 11, 51, 19, 59, 27, 34, 2, 42, 10, 50, 18, 58, 26, 33, 1, 41, 9, 49, 17, 57, 25, ) # Permuted-choice 1 from the key bits to yield C and D. # Note that bits 8,16... are left out: They are intended for a parity check. PC1_C = ( 57, 49, 41, 33, 25, 17, 9, 1, 58, 50, 42, 34, 26, 18, 10, 2, 59, 51, 43, 35, 27, 19, 11, 3, 60, 52, 44, 36, ) PC1_D = ( 63, 55, 47, 39, 31, 23, 15, 7, 62, 54, 46, 38, 30, 22, 14, 6, 61, 53, 45, 37, 29, 21, 13, 5, 28, 20, 12, 4, ) # Permuted-choice 2, to pick out the bits from the CD array that generate the # key schedule. PC2_C = ( 14, 17, 11, 24, 1, 5, 3, 28, 15, 6, 21, 10, 23, 19, 12, 4, 26, 8, 16, 7, 27, 20, 13, 2, ) PC2_D = ( 41, 52, 31, 37, 47, 55, 30, 40, 51, 45, 33, 48, 44, 49, 39, 56, 34, 53, 46, 42, 50, 36, 29, 32, ) # The C and D arrays are used to calculate the key schedule. C = [0] * 28 D = [0] * 28 # The key schedule. Generated from the key. KS = [[0] * 48 for _ in range(16)] # The E bit-selection table. E = [0] * 48 e2 = ( 32, 1, 2, 3, 4, 5, 4, 5, 6, 7, 8, 9, 8, 9, 10, 11, 12, 13, 12, 13, 14, 15, 16, 17, 16, 17, 18, 19, 20, 21, 20, 21, 22, 23, 24, 25, 24, 25, 26, 27, 28, 29, 28, 29, 30, 31, 32, 1, ) # S-boxes. S = ( ( 14, 4, 13, 1, 2, 15, 11, 8, 3, 10, 6, 12, 5, 9, 0, 7, 0, 15, 7, 4, 14, 2, 13, 1, 10, 6, 12, 11, 9, 5, 3, 8, 4, 1, 14, 8, 13, 6, 2, 11, 15, 12, 9, 7, 3, 10, 5, 0, 15, 12, 8, 2, 4, 9, 1, 7, 5, 11, 3, 14, 10, 0, 6, 13 ), ( 15, 1, 8, 14, 6, 11, 3, 4, 9, 7, 2, 13, 12, 0, 5, 10, 3, 13, 4, 7, 15, 2, 8, 14, 12, 0, 1, 10, 6, 9, 11, 5, 0, 14, 7, 11, 10, 4, 13, 1, 5, 8, 12, 6, 9, 3, 2, 15, 13, 8, 10, 1, 3, 15, 4, 2, 11, 6, 7, 12, 0, 5, 14, 9 ), ( 10, 0, 9, 14, 6, 3, 15, 5, 1, 13, 12, 7, 11, 4, 2, 8, 13, 7, 0, 9, 3, 4, 6, 10, 2, 8, 5, 14, 12, 11, 15, 1, 13, 6, 4, 9, 8, 15, 3, 0, 11, 1, 2, 12, 5, 10, 14, 7, 1, 10, 13, 0, 6, 9, 8, 7, 4, 15, 14, 3, 11, 5, 2, 12 ), ( 7, 13, 14, 3, 0, 6, 9, 10, 1, 2, 8, 5, 11, 12, 4, 15, 13, 8, 11, 5, 6, 15, 0, 3, 4, 7, 2, 12, 1, 10, 14, 9, 10, 6, 9, 0, 12, 11, 7, 13, 15, 1, 3, 14, 5, 2, 8, 4, 3, 15, 0, 6, 10, 1, 13, 8, 9, 4, 5, 11, 12, 7, 2, 14 ), ( 2, 12, 4, 1, 7, 10, 11, 6, 8, 5, 3, 15, 13, 0, 14, 9, 14, 11, 2, 12, 4, 7, 13, 1, 5, 0, 15, 10, 3, 9, 8, 6, 4, 2, 1, 11, 10, 13, 7, 8, 15, 9, 12, 5, 6, 3, 0, 14, 11, 8, 12, 7, 1, 14, 2, 13, 6, 15, 0, 9, 10, 4, 5, 3 ), ( 12, 1, 10, 15, 9, 2, 6, 8, 0, 13, 3, 4, 14, 7, 5, 11, 10, 15, 4, 2, 7, 12, 9, 5, 6, 1, 13, 14, 0, 11, 3, 8, 9, 14, 15, 5, 2, 8, 12, 3, 7, 0, 4, 10, 1, 13, 11, 6, 4, 3, 2, 12, 9, 5, 15, 10, 11, 14, 1, 7, 6, 0, 8, 13 ), ( 4, 11, 2, 14, 15, 0, 8, 13, 3, 12, 9, 7, 5, 10, 6, 1, 13, 0, 11, 7, 4, 9, 1, 10, 14, 3, 5, 12, 2, 15, 8, 6, 1, 4, 11, 13, 12, 3, 7, 14, 10, 15, 6, 8, 0, 5, 9, 2, 6, 11, 13, 8, 1, 4, 10, 7, 9, 5, 0, 15, 14, 2, 3, 12 ), ( 13, 2, 8, 4, 6, 15, 11, 1, 10, 9, 3, 14, 5, 0, 12, 7, 1, 15, 13, 8, 10, 3, 7, 4, 12, 5, 6, 11, 0, 14, 9, 2, 7, 11, 4, 1, 9, 12, 14, 2, 0, 6, 10, 13, 15, 3, 5, 8, 2, 1, 14, 7, 4, 10, 8, 13, 15, 12, 9, 0, 3, 5, 6, 11 ) ) # P is a permutation on the selected combination of the current L and key. P = ( 16, 7, 20, 21, 29, 12, 28, 17, 1, 15, 23, 26, 5, 18, 31, 10, 2, 8, 24, 14, 32, 27, 3, 9, 19, 13, 30, 6, 22, 11, 4, 25, ) # The combination of the key and the input, before selection. 
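# preS holds those 48 bits: the 32-bit right half expanded through the E
# selector and XORed with the current round key (filled once per round in
# __encrypt() below).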
preS = [0] * 48 def __setkey(key): """ Set up the key schedule from the encryption key. """ global C, D, KS, E shifts = (1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1) # First, generate C and D by permuting the key. The lower order bit of each # 8-bit char is not used, so C and D are only 28 bits apiece. for i in range(28): C[i] = key[PC1_C[i] - 1] D[i] = key[PC1_D[i] - 1] for i in range(16): # rotate for k in range(shifts[i]): temp = C[0] for j in range(27): C[j] = C[j + 1] C[27] = temp temp = D[0] for j in range(27): D[j] = D[j + 1] D[27] = temp # get Ki. Note C and D are concatenated for j in range(24): KS[i][j] = C[PC2_C[j] - 1] KS[i][j + 24] = D[PC2_D[j] - 28 - 1] # load E with the initial E bit selections for i in range(48): E[i] = e2[i] def __encrypt(block): global preS left, right = [], [] # block in two halves f = [0] * 32 # First, permute the bits in the input for j in range(32): left.append(block[IP[j] - 1]) for j in range(32, 64): right.append(block[IP[j] - 1]) # Perform an encryption operation 16 times. for i in range(16): # Save the right array, which will be the new left. old = right[:] # Expand right to 48 bits using the E selector and exclusive-or with # the current key bits. for j in range(48): preS[j] = right[E[j] - 1] ^ KS[i][j] # The pre-select bits are now considered in 8 groups of 6 bits each. # The 8 selection functions map these 6-bit quantities into 4-bit # quantities and the results are permuted to make an f(R, K). # The indexing into the selection functions is peculiar; it could be # simplified by rewriting the tables. for j in range(8): temp = 6 * j k = S[j][(preS[temp + 0] << 5) + (preS[temp + 1] << 3) + (preS[temp + 2] << 2) + (preS[temp + 3] << 1) + (preS[temp + 4] << 0) + (preS[temp + 5] << 4)] temp = 4 * j f[temp + 0] = (k >> 3) & 1 f[temp + 1] = (k >> 2) & 1 f[temp + 2] = (k >> 1) & 1 f[temp + 3] = (k >> 0) & 1 # The new right is left ^ f(R, K). # The f here has to be permuted first, though. for j in range(32): right[j] = left[j] ^ f[P[j] - 1] # Finally the new left (the original right) is copied back. left = old # The output left and right are reversed. 
left, right = right, left # The final output gets the inverse permutation of the very original for j in range(64): i = FP[j] if i < 33: block[j] = left[i - 1] else: block[j] = right[i - 33] return block def crypt(pw, salt): iobuf = [] # break pw into 64 bits block = [] for c in pw: c = ord(c) for j in range(7): block.append((c >> (6 - j)) & 1) block.append(0) block += [0] * (64 - len(block)) # set key based on pw __setkey(block) for i in range(2): # store salt at beginning of results iobuf.append(salt[i]) c = ord(salt[i]) if c > ord('Z'): c -= 6 if c > ord('9'): c -= 7 c -= ord('.') # use salt to effect the E-bit selection for j in range(6): if (c >> j) & 1: E[6 * i + j], E[6 * i + j + 24] = E[6 * i + j + 24], E[6 * i + j] # call DES encryption 25 times using pw as key and initial data = 0 block = [0] * 66 for i in range(25): block = __encrypt(block) # format encrypted block for standard crypt(3) output for i in range(11): c = 0 for j in range(6): c <<= 1 c |= block[6 * i + j] c += ord('.') if c > ord('9'): c += 7 if c > ord('Z'): c += 6 iobuf.append(chr(c)) return ''.join(iobuf) vertica-python-0.7.3/vertica_python/vertica/messages/frontend_messages/describe.py000066400000000000000000000015341312155705000306410ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from struct import pack from ..message import BulkFrontendMessage class Describe(BulkFrontendMessage): message_id = b'D' def __init__(self, describe_type, describe_name): BulkFrontendMessage.__init__(self) self._describe_name = describe_name if describe_type == 'portal': self._describe_type = 'P' elif describe_type == 'prepared_statement': self._describe_type = 'S' else: raise ValueError("{0} is not a valid describe_type. " "Must be either portal or prepared_statement".format(describe_type)) def read_bytes(self): bytes_ = pack('c{0}sx'.format(len(self._describe_name)), self._describe_type, self._describe_name) return bytes_ vertica-python-0.7.3/vertica_python/vertica/messages/frontend_messages/execute.py000066400000000000000000000007721312155705000305260ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from struct import pack from ..message import BulkFrontendMessage class Execute(BulkFrontendMessage): message_id = b'E' def __init__(self, portal_name, max_rows): BulkFrontendMessage.__init__(self) self._portal_name = portal_name self._max_rows = max_rows def read_bytes(self): bytes_ = pack('!{0}sxI'.format(len(self._portal_name)), self._portal_name, self._max_rows) return bytes_ vertica-python-0.7.3/vertica_python/vertica/messages/frontend_messages/flush.py000066400000000000000000000002461312155705000302010ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from ..message import BulkFrontendMessage class Flush(BulkFrontendMessage): message_id = b'H' vertica-python-0.7.3/vertica_python/vertica/messages/frontend_messages/parse.py000066400000000000000000000013711312155705000301720ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from struct import pack from ..message import BulkFrontendMessage class Parse(BulkFrontendMessage): message_id = b'P' def __init__(self, name, query, param_types): BulkFrontendMessage.__init__(self) self._name = name self._query = query self._param_types = param_types def read_bytes(self): params = "" for param in self._param_types: params = params + param bytes_ = pack('!{0}sx{1}sxH{2}I'.format(len(self._name), len(self._query), 
len(self._param_types)), self._name, self._query, len(self._param_types), params) return bytes_ vertica-python-0.7.3/vertica_python/vertica/messages/frontend_messages/password.py000066400000000000000000000037661312155705000307340ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import import os import hashlib from struct import pack import six from ..message import BulkFrontendMessage from ..backend_messages.authentication import Authentication if os.name == 'nt': from . import crypt_windows as crypt else: import crypt ASCII = 'ascii' class Password(BulkFrontendMessage): message_id = b'p' def __init__(self, password, auth_method=None, options=None): BulkFrontendMessage.__init__(self) self._password = password self._options = options or {} if auth_method is not None: self._auth_method = auth_method else: self._auth_method = Authentication.CLEARTEXT_PASSWORD def encoded_password(self): if self._auth_method == Authentication.CLEARTEXT_PASSWORD: return self._password elif self._auth_method == Authentication.CRYPT_PASSWORD: return crypt.crypt(self._password, self._options['salt']) elif self._auth_method == Authentication.MD5_PASSWORD: for key in 'user', 'salt': m = hashlib.md5() m.update(self._password + self._options[key]) hexdigest = m.hexdigest() if six.PY3: # In python3 the output of m.hexdigest() is a unicode string, # so has to be converted to bytes before concat'ing with # the password bytes. hexdigest = bytes(hexdigest, ASCII) self._password = hexdigest prefix = 'md5' if six.PY3: # Same workaround for bytes here. prefix = bytes(prefix, ASCII) return prefix + self._password else: raise ValueError("unsupported authentication method: {0}".format(self._auth_method)) def read_bytes(self): encoded_pw = self.encoded_password() bytes_ = pack('{0}sx'.format(len(encoded_pw)), encoded_pw) return bytes_ vertica-python-0.7.3/vertica_python/vertica/messages/frontend_messages/query.py000066400000000000000000000007551312155705000302320ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from struct import pack from ..message import BulkFrontendMessage UTF_8 = 'utf-8' class Query(BulkFrontendMessage): message_id = b'Q' def __init__(self, query_string): BulkFrontendMessage.__init__(self) self._query_string = query_string def read_bytes(self): encoded = self._query_string.encode(UTF_8) bytes_ = pack('{0}sx'.format(len(encoded)), encoded) return bytes_ vertica-python-0.7.3/vertica_python/vertica/messages/frontend_messages/ssl_request.py000066400000000000000000000004761312155705000314360ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from struct import pack from ..message import BulkFrontendMessage class SslRequest(BulkFrontendMessage): message_id = None SSL_REQUEST = 80877103 def read_bytes(self): bytes_ = pack('!I', self.SSL_REQUEST) return bytes_ vertica-python-0.7.3/vertica_python/vertica/messages/frontend_messages/startup.py000066400000000000000000000034571312155705000305710ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import import platform import os import uuid from struct import pack # noinspection PyUnresolvedReferences,PyCompatibility from builtins import str import vertica_python from ..message import BulkFrontendMessage ASCII = 'ascii' class Startup(BulkFrontendMessage): message_id = None def __init__(self, user, database, options=None): BulkFrontendMessage.__init__(self) self._user = user self._database = database self._options = 
options self._type = b'vertica-python' self._version = vertica_python.__version__.encode(ASCII) self._platform = platform.platform().encode(ASCII) self._pid = '{0}'.format(os.getpid()).encode(ASCII) self._label = self._type + b'-' + self._version + b'-' + str(uuid.uuid1()).encode(ASCII) def read_bytes(self): bytes_ = pack('!I', vertica_python.PROTOCOL_VERSION) if self._user is not None: bytes_ += pack('4sx{0}sx'.format(len(self._user)), b'user', self._user) if self._database is not None: bytes_ += pack('8sx{0}sx'.format(len(self._database)), b'database', self._database) if self._options is not None: bytes_ += pack('7sx{0}sx'.format(len(self._options)), b'options', self._options) bytes_ += pack('12sx{0}sx'.format(len(self._label)), b'client_label', self._label) bytes_ += pack('11sx{0}sx'.format(len(self._type)), b'client_type', self._type) bytes_ += pack('14sx{0}sx'.format(len(self._version)), b'client_version', self._version) bytes_ += pack('9sx{0}sx'.format(len(self._platform)), b'client_os', self._platform) bytes_ += pack('10sx{0}sx'.format(len(self._pid)), b'client_pid', self._pid) bytes_ += pack('x') return bytes_ vertica-python-0.7.3/vertica_python/vertica/messages/frontend_messages/sync.py000066400000000000000000000002451312155705000300330ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from ..message import BulkFrontendMessage class Sync(BulkFrontendMessage): message_id = b'S' vertica-python-0.7.3/vertica_python/vertica/messages/frontend_messages/terminate.py000066400000000000000000000002521312155705000310450ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from ..message import BulkFrontendMessage class Terminate(BulkFrontendMessage): message_id = b'X' vertica-python-0.7.3/vertica_python/vertica/messages/message.py000066400000000000000000000051071312155705000247770ustar00rootroot00000000000000from __future__ import print_function, division, absolute_import from abc import ABCMeta from struct import pack from ..messages import * class Message(object): __metaclass__ = ABCMeta def __init__(self): pass @property def message_id(self): raise NotImplementedError("no default message_id") def _bytes_to_message(self, msg): if isinstance(msg, list): msg = ''.join(msg) if hasattr(msg, 'bytesize'): bytesize = msg.bytesize + 4 else: bytesize = len(msg) + 4 message_size = pack('!I', bytesize) if self.message_id is not None: msg_with_size = self.message_id + message_size + msg else: msg_with_size = message_size + msg return msg_with_size # noinspection PyAbstractClass class BackendMessage(Message): __metaclass__ = ABCMeta _message_id_map = {} @classmethod def from_type(cls, type_, data): klass = cls._message_id_map.get(type_) if klass is not None: return klass(data) else: from .backend_messages import Unknown return Unknown(type_, data) @staticmethod def register(cls): # TODO replace _message_id() with that assert issubclass(cls, BackendMessage), \ "{0} is not subclass of BackendMessage".format(cls.__name__) assert cls.message_id not in BackendMessage._message_id_map, \ "can't write the same key twice: {0}".format(cls.message_id) BackendMessage._message_id_map[cls.message_id] = cls # noinspection PyAbstractClass class FrontendMessage(Message): __metaclass__ = ABCMeta def fetch_message(self): """Generator for getting the message's content""" raise NotImplementedError("fetch_bytes has no default implementation") # noinspection PyAbstractClass class BulkFrontendMessage(FrontendMessage): __metaclass__ = ABCMeta 
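    # Bulk messages build their whole payload at once in read_bytes();
    # fetch_message() then yields a single framed message via
    # _bytes_to_message(). StreamFrontendMessage below instead yields one
    # framed message per chunk produced by stream_bytes() (used by CopyStream).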
    def read_bytes(self):
        return b''

    def get_message(self):
        bytes_ = self.read_bytes()
        return self._bytes_to_message(bytes_)

    def fetch_message(self):
        yield self.get_message()


# noinspection PyAbstractClass
class StreamFrontendMessage(FrontendMessage):
    __metaclass__ = ABCMeta

    def stream_bytes(self):
        raise NotImplementedError("stream_bytes has no default implementation")

    def stream_message(self):
        for bytes_ in self.stream_bytes():
            yield self._bytes_to_message(bytes_)

    def fetch_message(self):
        for message in self.stream_message():
            yield message
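# Minimal sketch of how the classes above are intended to be subclassed.
# Illustrative only; ``Ping`` and its message_id byte are invented for the
# example and are not part of the Vertica protocol:
#
#   class Ping(BulkFrontendMessage):
#       message_id = b'?'          # hypothetical frontend type byte
#
#       def read_bytes(self):
#           return pack('!I', 0)   # 4-byte payload
#
# fetch_message() would then yield message_id + a 4-byte big-endian length
# (payload length + 4) + the payload, which Connection.write() sends with
# socket.sendall().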