" % (
self.months, self.days, self.microseconds)
def __eq__(self, other):
    """Two Intervals are equal when months, days and microseconds all match.

    ``isinstance(None, Interval)`` is already False, so the explicit
    ``other is not None`` guard in the old version was redundant and has
    been dropped.
    """
    return (
        isinstance(other, Interval) and
        self.months == other.months and
        self.days == other.days and
        self.microseconds == other.microseconds)
def __ne__(self, other):
    """Inverse of :meth:`__eq__`.

    Bug fix: the hook was previously named ``__neq__``, which Python never
    calls — the ``!=`` operator looks up ``__ne__``, so inequality tests
    did not use the months/days/microseconds comparison at all.
    """
    return not self.__eq__(other)

# Keep the old (misspelled) name for any code that called it directly.
__neq__ = __ne__
def pack_funcs(fmt):
struc = Struct('!' + fmt)
return struc.pack, struc.unpack_from
# Pre-compiled network-order struct helpers, named by their format codes
# (i = int32, h = int16, q = int64, d = float64, f = float32, c = char,
# b = signed byte; multi-letter names pack several fields at once).
i_pack, i_unpack = pack_funcs('i')
h_pack, h_unpack = pack_funcs('h')
q_pack, q_unpack = pack_funcs('q')
d_pack, d_unpack = pack_funcs('d')
f_pack, f_unpack = pack_funcs('f')

iii_pack, iii_unpack = pack_funcs('iii')
ii_pack, ii_unpack = pack_funcs('ii')
qii_pack, qii_unpack = pack_funcs('qii')
dii_pack, dii_unpack = pack_funcs('dii')
ihihih_pack, ihihih_unpack = pack_funcs('ihihih')
ci_pack, ci_unpack = pack_funcs('ci')
bh_pack, bh_unpack = pack_funcs('bh')
cccc_pack, cccc_unpack = pack_funcs('cccc')

# NOTE: a stray bare "Struct('!i')" expression used to sit here; its result
# was discarded and it had no side effects, so it has been removed.

# Exclusive bounds of the PostgreSQL integer types (int2/int4/int8); values
# are compared with strict < / > elsewhere in the module.
min_int2, max_int2 = -2 ** 15, 2 ** 15
min_int4, max_int4 = -2 ** 31, 2 ** 31
min_int8, max_int8 = -2 ** 63, 2 ** 63
class Warning(Exception):
    """Raised for important database warnings such as data truncation.

    pg8000 does not currently raise this exception itself; it exists to
    satisfy the DB-API 2.0 specification (PEP 249).
    """
class Error(Exception):
    """Base class for every other error exception raised by this module.

    Part of the DB-API 2.0 specification (PEP 249).
    """
class InterfaceError(Error):
    """Raised for errors in the database *interface* rather than the
    database itself — e.g. the interface requests an SSL connection but the
    server refuses.

    Part of the DB-API 2.0 specification (PEP 249).
    """
class DatabaseError(Error):
    """Generic error related to the database itself.  Currently never
    raised directly by pg8000.

    Part of the DB-API 2.0 specification (PEP 249).
    """
class DataError(DatabaseError):
    """Error caused by problems with the processed data.  Not currently
    raised by pg8000.

    Part of the DB-API 2.0 specification (PEP 249).
    """
class OperationalError(DatabaseError):
    """Error related to the database's operation, not necessarily under the
    programmer's control.  Currently never raised by pg8000.

    Part of the DB-API 2.0 specification (PEP 249).
    """
class IntegrityError(DatabaseError):
    """Raised when the relational integrity of the database is affected.
    Not currently raised by pg8000.

    Part of the DB-API 2.0 specification (PEP 249).
    """
class InternalError(DatabaseError):
    """Raised when the database hits an internal error.  In practice this
    means unexpected state inside the pg8000 interface itself, typically
    the result of an interface bug.

    Part of the DB-API 2.0 specification (PEP 249).
    """
class ProgrammingError(DatabaseError):
    """Raised for programming errors — for example, a query string that
    contains more parameter fields than there are available parameters.

    Part of the DB-API 2.0 specification (PEP 249).
    """
class NotSupportedError(DatabaseError):
    """Raised when a method or database API is used that the database does
    not support.

    Part of the DB-API 2.0 specification (PEP 249).
    """
class ArrayContentNotSupportedError(NotSupportedError):
    """Raised when transmitting an array whose base type the interface
    cannot send as binary data.
    """
class ArrayContentNotHomogenousError(ProgrammingError):
    """Raised when transmitting an array containing more than one type of
    object.
    """
class ArrayContentEmptyError(ProgrammingError):
    """Raised when transmitting an empty array: the element type oid of an
    empty array cannot be determined, so sending one is not permitted.
    """
class ArrayDimensionsNotConsistentError(ProgrammingError):
    """Raised when transmitting a multi-dimensional array whose dimension
    sizes are inconsistent.
    """
class Bytea(binary_type):
    """A str-derived type mapped to the PostgreSQL ``bytea`` type.

    Used only on Python 2; on Python 3 the built-in ``bytes`` type is used
    instead.
    """
def Date(year, month, day):
    """Construct an object holding a date value.

    Part of the DB-API 2.0 specification (PEP 249).

    :rtype: :class:`datetime.date`
    """
    return datetime.date(year, month, day)
def Time(hour, minute, second):
    """Construct an object holding a time value.

    Part of the DB-API 2.0 specification (PEP 249).

    :rtype: :class:`datetime.time`
    """
    return datetime.time(hour, minute, second)
def Timestamp(year, month, day, hour, minute, second):
    """Construct an object holding a timestamp value.

    Part of the DB-API 2.0 specification (PEP 249).

    :rtype: :class:`datetime.datetime`
    """
    return datetime.datetime(year, month, day, hour, minute, second)
def DateFromTicks(ticks):
    """Construct a date from *ticks* (seconds since the Unix epoch),
    interpreted in local time.

    Part of the DB-API 2.0 specification (PEP 249).

    :rtype: :class:`datetime.date`
    """
    year, month, day = time.localtime(ticks)[:3]
    return datetime.date(year, month, day)
def TimeFromTicks(ticks):
    """Construct a time from *ticks* (seconds since the Unix epoch),
    interpreted in local time.

    Part of the DB-API 2.0 specification (PEP 249).

    :rtype: :class:`datetime.time`
    """
    hour, minute, second = time.localtime(ticks)[3:6]
    return datetime.time(hour, minute, second)
def TimestampFromTicks(ticks):
    """Construct a timestamp from *ticks* (seconds since the Unix epoch),
    interpreted in local time.

    Part of the DB-API 2.0 specification (PEP 249).

    :rtype: :class:`datetime.datetime`
    """
    return datetime.datetime(*time.localtime(ticks)[:6])
def Binary(value):
    """Construct an object holding binary data.

    Part of the DB-API 2.0 specification (PEP 249).

    :rtype: :class:`Bytea` on Python 2, otherwise the value unchanged
        (expected to already be :class:`bytes`)
    """
    return Bytea(value) if PY2 else value
# Python < 2.6 has no bytearray builtin; fall back to list for the few
# call sites that need a mutable byte sequence.
if PRE_26:
    bytearray = list

# DB-API BINARY type object: Bytea on Python 2, the builtin bytes on 3.
if PY2:
    BINARY = Bytea
else:
    BINARY = bytes

# PostgreSQL wire-protocol format codes: 0 = text, 1 = binary.
FC_TEXT = 0
FC_BINARY = 1

BINARY_SPACE = b(" ")
# Commands whose CommandComplete tag carries no row count.
DDL_COMMANDS = b("ALTER"), b("CREATE")
def convert_paramstyle(style, query):
    """Rewrite *query* from the given DB-API paramstyle into PostgreSQL's
    native ``$1, $2, ...`` placeholder syntax.

    :param style: one of ``qmark``, ``numeric``, ``named``, ``format`` or
        ``pyformat``.
    :param query: the SQL text to scan.
    :returns: a ``(query, make_args)`` tuple.  ``make_args`` converts the
        caller-supplied parameters (a sequence, or a mapping for the named
        styles) into the positional sequence matching the ``$n``
        placeholders that were generated.
    """
    # I don't see any way to avoid scanning the query string char by char,
    # so we might as well take that careful approach and create a
    # state-based scanner.  We'll use int variables for the state.
    #  0 -- outside quoted string
    #  1 -- inside single-quote string '...'
    #  2 -- inside quoted identifier "..."
    #  3 -- inside escaped single-quote string, E'...'
    #  4 -- inside parameter name eg. :name
    OUTSIDE = 0
    INSIDE_SQ = 1
    INSIDE_QI = 2
    INSIDE_ES = 3
    INSIDE_PN = 4

    in_quote_escape = False
    in_param_escape = False
    # For the named styles, the parameter names seen so far (repeats map to
    # the same $n); for positional styles, param_idx yields $1, $2, ...
    placeholders = []
    output_query = []
    param_idx = map(lambda x: "$" + str(x), count(1))
    state = OUTSIDE
    prev_c = None
    for i, c in enumerate(query):
        # One character of lookahead (None at end of string).
        if i + 1 < len(query):
            next_c = query[i + 1]
        else:
            next_c = None
        if state == OUTSIDE:
            if c == "'":
                output_query.append(c)
                if prev_c == 'E':
                    state = INSIDE_ES
                else:
                    state = INSIDE_SQ
            elif c == '"':
                output_query.append(c)
                state = INSIDE_QI
            elif style == "qmark" and c == "?":
                output_query.append(next(param_idx))
            elif style == "numeric" and c == ":":
                # :1 -> $1; the digits that follow are copied through.
                output_query.append("$")
            elif style == "named" and c == ":":
                state = INSIDE_PN
                placeholders.append('')
            elif style == "pyformat" and c == '%' and next_c == "(":
                state = INSIDE_PN
                placeholders.append('')
            elif style in ("format", "pyformat") and c == "%":
                # A bare %s placeholder means the query is positional;
                # treat it as "format" style from here on.
                style = "format"
                if in_param_escape:
                    in_param_escape = False
                    output_query.append(c)
                else:
                    if next_c == "%":
                        in_param_escape = True
                    elif next_c == "s":
                        state = INSIDE_PN
                        output_query.append(next(param_idx))
                    else:
                        raise InterfaceError(
                            "Only %s and %% are supported in the query.")
            else:
                output_query.append(c)
        elif state == INSIDE_SQ:
            if c == "'":
                output_query.append(c)
                if in_quote_escape:
                    in_quote_escape = False
                else:
                    # '' inside a string is an escaped quote, not the end.
                    if next_c == "'":
                        in_quote_escape = True
                    else:
                        state = OUTSIDE
            elif style in ("pyformat", "format") and c == "%":
                # hm... we're only going to support an escaped percent sign
                if in_param_escape:
                    in_param_escape = False
                    output_query.append(c)
                else:
                    if next_c == "%":
                        in_param_escape = True
                    else:
                        raise InterfaceError(
                            "'%" + next_c + "' not supported in a quoted "
                            "string within the query string")
            else:
                output_query.append(c)
        elif state == INSIDE_QI:
            if c == '"':
                state = OUTSIDE
                output_query.append(c)
            elif style in ("pyformat", "format") and c == "%":
                # hm... we're only going to support an escaped percent sign
                if in_param_escape:
                    in_param_escape = False
                    output_query.append(c)
                else:
                    if next_c == "%":
                        in_param_escape = True
                    else:
                        raise InterfaceError(
                            "'%" + next_c + "' not supported in a quoted "
                            "string within the query string")
            else:
                output_query.append(c)
        elif state == INSIDE_ES:
            if c == "'" and prev_c != "\\":
                # check for escaped single-quote
                output_query.append(c)
                state = OUTSIDE
            elif style in ("pyformat", "format") and c == "%":
                # hm... we're only going to support an escaped percent sign
                if in_param_escape:
                    in_param_escape = False
                    output_query.append(c)
                else:
                    if next_c == "%":
                        in_param_escape = True
                    else:
                        raise InterfaceError(
                            "'%" + next_c + "' not supported in a quoted "
                            "string within the query string.")
            else:
                output_query.append(c)
        elif state == INSIDE_PN:
            if style == 'named':
                placeholders[-1] += c
                if next_c is None or (not next_c.isalnum() and next_c != '_'):
                    state = OUTSIDE
                    try:
                        # A repeated name re-uses the earlier $n.
                        pidx = placeholders.index(placeholders[-1], 0, -1)
                        output_query.append("$" + str(pidx + 1))
                        del placeholders[-1]
                    except ValueError:
                        output_query.append("$" + str(len(placeholders)))
            elif style == 'pyformat':
                if prev_c == ')' and c == "s":
                    state = OUTSIDE
                    try:
                        pidx = placeholders.index(placeholders[-1], 0, -1)
                        output_query.append("$" + str(pidx + 1))
                        del placeholders[-1]
                    except ValueError:
                        output_query.append("$" + str(len(placeholders)))
                elif c in "()":
                    pass
                else:
                    placeholders[-1] += c
            elif style == 'format':
                # The 's' of a %s placeholder; nothing more to consume.
                state = OUTSIDE
        prev_c = c
    if style in ('numeric', 'qmark', 'format'):
        def make_args(vals):
            return vals
    else:
        def make_args(vals):
            return tuple(vals[p] for p in placeholders)
    return ''.join(output_query), make_args
# PostgreSQL binary timestamps count from 2000-01-01, not the Unix epoch.
EPOCH = datetime.datetime(2000, 1, 1)
EPOCH_TZ = EPOCH.replace(tzinfo=utc)
EPOCH_SECONDS = timegm(EPOCH.timetuple())
utcfromtimestamp = datetime.datetime.utcfromtimestamp

# Wire sentinels for the special 'infinity' / '-infinity' timestamp values.
INFINITY_MICROSECONDS = 2 ** 63 - 1
MINUS_INFINITY_MICROSECONDS = -1 * INFINITY_MICROSECONDS - 1
# data is a 64-bit integer of microseconds since 2000-01-01
def timestamp_recv_integer(data, offset, length):
    """Decode an int64 microsecond count into a naive datetime.

    The int64 extremes encode 'infinity'/'-infinity', which overflow
    timedelta and are mapped to datetime.max / datetime.min.
    """
    microseconds = q_unpack(data, offset)[0]
    try:
        return EPOCH + timedelta(microseconds=microseconds)
    except OverflowError:
        if microseconds == INFINITY_MICROSECONDS:
            return datetime.datetime.max
        if microseconds == MINUS_INFINITY_MICROSECONDS:
            return datetime.datetime.min
        # Genuine out-of-range value: re-raise (py2/py3-compatible form).
        raise exc_info()[1]
# data is a double-precision float of seconds since 2000-01-01
def timestamp_recv_float(data, offset, length):
    """Decode a float64 seconds-since-2000 value into a naive datetime."""
    seconds = d_unpack(data, offset)[0]
    return utcfromtimestamp(EPOCH_SECONDS + seconds)
# data is a 64-bit integer of microseconds since 2000-01-01
def timestamp_send_integer(v):
    """Encode a naive datetime as int64 microseconds since 2000-01-01.

    datetime.max / datetime.min are mapped to the 'infinity' sentinels.
    """
    if v == datetime.datetime.max:
        return q_pack(INFINITY_MICROSECONDS)
    if v == datetime.datetime.min:
        return q_pack(MINUS_INFINITY_MICROSECONDS)
    whole_seconds = timegm(v.timetuple()) - EPOCH_SECONDS
    return q_pack(int(whole_seconds * 1e6) + v.microsecond)
# data is a double-precision float of seconds since 2000-01-01
def timestamp_send_float(v):
    """Encode a naive datetime as float64 seconds since 2000-01-01.

    Bug fix: the previous code passed the bound method ``v.timetuple``
    (missing call parentheses) to ``timegm``, which raises TypeError at
    runtime; it must call ``v.timetuple()``.
    """
    return d_pack(timegm(v.timetuple()) + v.microsecond / 1e6 - EPOCH_SECONDS)
def timestamptz_send_integer(v):
    """Encode an aware datetime for an integer-format server.

    The wire format expects UTC, so convert first and strip the tzinfo
    before delegating to the naive encoder.
    """
    as_utc = v.astimezone(utc).replace(tzinfo=None)
    return timestamp_send_integer(as_utc)
def timestamptz_send_float(v):
    """Encode an aware datetime for a float-format server.

    The wire format expects UTC, so convert first and strip the tzinfo
    before delegating to the naive encoder.
    """
    as_utc = v.astimezone(utc).replace(tzinfo=None)
    return timestamp_send_float(as_utc)
# UTC-aware counterparts of datetime.max/min, returned for the
# 'infinity'/'-infinity' sentinels of timestamptz columns.
DATETIME_MAX_TZ = datetime.datetime.max.replace(tzinfo=utc)
DATETIME_MIN_TZ = datetime.datetime.min.replace(tzinfo=utc)
# Reading a "timestamp with time zone" column yields a timezone-aware
# datetime whose tzinfo is always UTC; that extra information lets callers
# convert to local time if they wish.
def timestamptz_recv_integer(data, offset, length):
    """Decode int64 microseconds since 2000-01-01 UTC into an aware datetime."""
    microseconds = q_unpack(data, offset)[0]
    try:
        return EPOCH_TZ + timedelta(microseconds=microseconds)
    except OverflowError:
        # The int64 extremes encode 'infinity' / '-infinity'.
        if microseconds == INFINITY_MICROSECONDS:
            return DATETIME_MAX_TZ
        if microseconds == MINUS_INFINITY_MICROSECONDS:
            return DATETIME_MIN_TZ
        raise exc_info()[1]
def timestamptz_recv_float(data, offset, length):
    """Decode a float-format timestamptz: naive decode, then tag as UTC."""
    naive = timestamp_recv_float(data, offset, length)
    return naive.replace(tzinfo=utc)
def interval_send_integer(v):
    """Encode an Interval or timedelta as (int64 microseconds, int32 days,
    int32 months) for integer-format servers.

    timedelta exposes ``seconds`` (folded into microseconds) but no
    ``months``; Interval exposes ``months`` but no ``seconds``.
    """
    microseconds = v.microseconds
    try:
        seconds = v.seconds
    except AttributeError:
        pass  # Interval has no seconds attribute
    else:
        microseconds += int(seconds * 1e6)
    months = getattr(v, 'months', 0)
    return qii_pack(microseconds, v.days, months)
def interval_send_float(v):
    """Encode an Interval or timedelta as (float64 seconds, int32 days,
    int32 months) for float-format servers.
    """
    seconds = v.microseconds / 1000.0 / 1000.0
    try:
        whole_seconds = v.seconds
    except AttributeError:
        pass  # Interval has no seconds attribute
    else:
        seconds += whole_seconds
    months = getattr(v, 'months', 0)
    return dii_pack(seconds, v.days, months)
def interval_recv_integer(data, offset, length):
    """Decode an integer-format interval (int64 microseconds, int32 days,
    int32 months).

    With no month component the value fits a timedelta; otherwise an
    Interval is returned, since months have no fixed length.
    """
    microseconds, days, months = qii_unpack(data, offset)
    if months != 0:
        return Interval(microseconds, days, months)
    seconds, remainder_micros = divmod(microseconds, 1e6)
    return datetime.timedelta(days, seconds, remainder_micros)
def interval_recv_float(data, offset, length):
    """Decode a float-format interval (float64 seconds, int32 days,
    int32 months).

    With no month component the value fits a timedelta; otherwise an
    Interval is returned, since months have no fixed length.

    Bug fix: the time field on the wire is *seconds* (that is what
    interval_send_float writes), but the old code did
    ``divmod(seconds, 1e6)`` — copy-pasted from the microsecond-based
    integer decoder — which mis-scaled the result by a factor of 1e6.
    timedelta accepts fractional seconds directly.
    """
    seconds, days, months = dii_unpack(data, offset)
    if months == 0:
        return datetime.timedelta(days, seconds)
    else:
        return Interval(int(seconds * 1000 * 1000), days, months)
def int8_recv(data, offset, length):
    """Decode a big-endian int64 field."""
    (value,) = q_unpack(data, offset)
    return value
def int2_recv(data, offset, length):
    """Decode a big-endian int16 field."""
    (value,) = h_unpack(data, offset)
    return value
def int4_recv(data, offset, length):
    """Decode a big-endian int32 field."""
    (value,) = i_unpack(data, offset)
    return value
def float4_recv(data, offset, length):
    """Decode a big-endian float32 field."""
    (value,) = f_unpack(data, offset)
    return value
def float8_recv(data, offset, length):
    """Decode a big-endian float64 field."""
    (value,) = d_unpack(data, offset)
    return value
def bytea_send(v):
    """Bytea values are already raw bytes; pass them through unchanged."""
    return v
# bytea: slice the raw field bytes out of the buffer.  On Python 2 the
# result is wrapped in the Bytea str subclass; on Python 3 a bytes slice
# is returned directly.
if PY2:
    def bytea_recv(data, offset, length):
        return Bytea(data[offset:offset + length])
else:
    def bytea_recv(data, offset, length):
        return data[offset:offset + length]
def uuid_send(v):
    """Encode a :class:`uuid.UUID` as its 16 raw big-endian bytes."""
    return v.bytes
def uuid_recv(data, offset, length):
    """Decode 16 raw bytes into a :class:`uuid.UUID`."""
    raw = data[offset:offset + length]
    return UUID(bytes=raw)
# Single-byte binary wire encodings for the boolean type.
TRUE = b("\x01")
FALSE = b("\x00")
def bool_send(v):
    """Encode truthiness as the one-byte PostgreSQL boolean wire value."""
    if v:
        return TRUE
    return FALSE
# A field length of -1 on the wire signals a NULL value.
NULL = i_pack(-1)
NULL_BYTE = b('\x00')
def null_send(v):
    """Any Python None is sent as the NULL length marker (-1)."""
    return NULL
def int_in(data, offset, length):
    """Parse a text-format integer field from the buffer."""
    text = data[offset: offset + length]
    return int(text)
class Cursor():
    """A cursor object is returned by the :meth:`~Connection.cursor` method of
    a connection.  It has the following attributes and methods:

    .. attribute:: arraysize

        This read/write attribute specifies the number of rows to fetch at a
        time with :meth:`fetchmany`.  It defaults to 1.

    .. attribute:: connection

        This read-only attribute contains a reference to the connection object
        (an instance of :class:`Connection`) on which the cursor was created.

        This attribute is part of a DBAPI 2.0 extension.  Accessing this
        attribute will generate the following warning: ``DB-API extension
        cursor.connection used``.

    .. attribute:: rowcount

        This read-only attribute contains the number of rows that the last
        ``execute()`` or ``executemany()`` method produced (for query
        statements like ``SELECT``) or affected (for modification statements
        like ``UPDATE``).

        The value is -1 if:

        - No ``execute()`` or ``executemany()`` method has been performed yet
          on the cursor.
        - There was no rowcount associated with the last ``execute()``.
        - At least one of the statements executed as part of an
          ``executemany()`` had no row count associated with it.
        - Using a ``SELECT`` query statement on PostgreSQL server older than
          version 9.
        - Using a ``COPY`` query statement on PostgreSQL server version 8.1 or
          older.

        This attribute is part of the DB-API 2.0 specification (PEP 249).

    .. attribute:: description

        This read-only attribute is a sequence of 7-item sequences.  Each
        value contains information describing one result column.  The 7 items
        returned for each column are (name, type_code, display_size,
        internal_size, precision, scale, null_ok).  Only the first two values
        are provided by the current implementation.

        This attribute is part of the DB-API 2.0 specification (PEP 249).
    """

    def __init__(self, connection):
        # The owning Connection; set to None by close(), which is how a
        # closed cursor is detected elsewhere.
        self._c = connection
        self.arraysize = 1
        # Prepared-statement state dict for the last executed query, or None
        # if nothing has been executed yet.
        self.ps = None
        self._row_count = -1
        # Rows received from the server but not yet handed to the caller.
        self._cached_rows = deque()
        self.portal_name = None
        # True while the server portal still has undelivered rows.
        self.portal_suspended = False

    @property
    def connection(self):
        warn("DB-API extension cursor.connection used", stacklevel=3)
        return self._c

    @property
    def rowcount(self):
        return self._row_count

    description = property(lambda self: self._getDescription())

    def _getDescription(self):
        # Build the DB-API 'description' 7-tuples from the RowDescription
        # message; only name and type_oid are populated.
        if self.ps is None:
            return None
        row_desc = self.ps['row_desc']
        if len(row_desc) == 0:
            return None
        columns = []
        for col in row_desc:
            columns.append(
                (col["name"], col["type_oid"], None, None, None, None, None))
        return columns

    def execute(self, operation, args=None, stream=None):
        """Executes a database operation.  Parameters may be provided as a
        sequence, or as a mapping, depending upon the value of
        :data:`pg8000.paramstyle`.

        This method is part of the DB-API 2.0 specification (PEP 249).

        :param operation:
            The SQL statement to execute.

        :param args:
            If :data:`paramstyle` is ``qmark``, ``numeric``, or ``format``,
            this argument should be an array of parameters to bind into the
            statement.  If :data:`paramstyle` is ``named``, the argument
            should be a dict mapping of parameters.  If the
            :data:`paramstyle` is ``pyformat``, the argument value may be
            either an array or a mapping.

        :param stream: This is a pg8000 extension for use with the PostgreSQL
            COPY command.  For a COPY FROM the parameter must be a readable
            file-like object, and for COPY TO it must be writable.

        .. versionadded:: 1.9.11
        """
        try:
            self._c._lock.acquire()
            self.stream = stream
            # Implicitly open a transaction unless autocommit is on.
            if not self._c.in_transaction and not self._c.autocommit:
                self._c.execute(self, "begin transaction", None)
            self._c.execute(self, operation, args)
        except AttributeError:
            # AttributeError here usually means the cursor or connection has
            # been closed (self._c or the socket is None); anything else is
            # re-raised unchanged.
            if self._c is None:
                raise InterfaceError("Cursor closed")
            elif self._c._sock is None:
                raise InterfaceError("connection is closed")
            else:
                raise exc_info()[1]
        finally:
            self._c._lock.release()

    def executemany(self, operation, param_sets):
        """Prepare a database operation, and then execute it against all
        parameter sequences or mappings provided.

        This method is part of the DB-API 2.0 specification (PEP 249).

        :param operation:
            The SQL statement to execute
        :param parameter_sets:
            A sequence of parameters to execute the statement with.  The
            values in the sequence should be sequences or mappings of
            parameters, the same as the args argument of the
            :meth:`execute` method.
        """
        rowcounts = []
        for parameters in param_sets:
            self.execute(operation, parameters)
            rowcounts.append(self._row_count)
        # -1 (unknown) in any run makes the overall count unknown.
        self._row_count = -1 if -1 in rowcounts else sum(rowcounts)

    def fetchone(self):
        """Fetch the next row of a query result set.

        This method is part of the DB-API 2.0 specification (PEP 249).

        :returns:
            A row as a sequence of field values, or ``None`` if no more rows
            are available.
        """
        try:
            return next(self)
        except StopIteration:
            return None
        except TypeError:
            raise ProgrammingError("attempting to use unexecuted cursor")
        except AttributeError:
            raise ProgrammingError("attempting to use unexecuted cursor")

    def fetchmany(self, num=None):
        """Fetches the next set of rows of a query result.

        This method is part of the DB-API 2.0 specification (PEP 249).

        :param size:
            The number of rows to fetch when called.  If not provided, the
            :attr:`arraysize` attribute value is used instead.

        :returns:
            A sequence, each entry of which is a sequence of field values
            making up a row.  If no more rows are available, an empty
            sequence will be returned.
        """
        try:
            return tuple(
                islice(self, self.arraysize if num is None else num))
        except TypeError:
            raise ProgrammingError("attempting to use unexecuted cursor")

    def fetchall(self):
        """Fetches all remaining rows of a query result.

        This method is part of the DB-API 2.0 specification (PEP 249).

        :returns:
            A sequence, each entry of which is a sequence of field values
            making up a row.
        """
        try:
            return tuple(self)
        except TypeError:
            raise ProgrammingError("attempting to use unexecuted cursor")

    def close(self):
        """Closes the cursor.

        This method is part of the DB-API 2.0 specification (PEP 249).
        """
        # Dropping the connection reference is what marks this cursor
        # closed; later use raises via the AttributeError paths above.
        self._c = None

    def __iter__(self):
        """A cursor object is iterable to retrieve the rows from a query.

        This is a DBAPI 2.0 extension.
        """
        return self

    def setinputsizes(self, sizes):
        """This method is part of the DB-API 2.0 specification (PEP 249),
        however, it is not implemented by pg8000.
        """
        pass

    def setoutputsize(self, size, column=None):
        """This method is part of the DB-API 2.0 specification (PEP 249),
        however, it is not implemented by pg8000.
        """
        pass

    def __next__(self):
        # Serve rows from the local cache; when it runs dry, ask the server
        # to resume the suspended portal and refill the cache.
        try:
            self._c._lock.acquire()
            return self._cached_rows.popleft()
        except IndexError:
            if self.portal_suspended:
                self._c.send_EXECUTE(self)
                self._c._write(SYNC_MSG)
                self._c._flush()
                self._c.handle_messages(self)
                if not self.portal_suspended:
                    self._c.close_portal(self)
            try:
                return self._cached_rows.popleft()
            except IndexError:
                # Still empty after a refill attempt: decide why.
                if self.ps is None:
                    raise ProgrammingError("A query hasn't been issued.")
                elif len(self.ps['row_desc']) == 0:
                    raise ProgrammingError("no result set")
                else:
                    raise StopIteration()
        finally:
            self._c._lock.release()
# Python 2's iterator protocol calls next() rather than __next__().
if PY2:
    Cursor.next = Cursor.__next__
# Message codes

# Backend (server -> client) message type bytes.
NOTICE_RESPONSE = b("N")
AUTHENTICATION_REQUEST = b("R")
PARAMETER_STATUS = b("S")
BACKEND_KEY_DATA = b("K")
READY_FOR_QUERY = b("Z")
ROW_DESCRIPTION = b("T")
ERROR_RESPONSE = b("E")
DATA_ROW = b("D")
COMMAND_COMPLETE = b("C")
PARSE_COMPLETE = b("1")
BIND_COMPLETE = b("2")
CLOSE_COMPLETE = b("3")
PORTAL_SUSPENDED = b("s")
NO_DATA = b("n")
PARAMETER_DESCRIPTION = b("t")
NOTIFICATION_RESPONSE = b("A")
COPY_DONE = b("c")
COPY_DATA = b("d")
COPY_IN_RESPONSE = b("G")
COPY_OUT_RESPONSE = b("H")
EMPTY_QUERY_RESPONSE = b("I")

# Frontend (client -> server) message type bytes.  Some letters repeat
# backend codes (e.g. "E"); direction disambiguates them.
BIND = b("B")
PARSE = b("P")
EXECUTE = b("E")
FLUSH = b('H')
SYNC = b('S')
PASSWORD = b('p')
DESCRIBE = b('D')
TERMINATE = b('X')
CLOSE = b('C')

# Complete, pre-built zero-payload messages (type byte + int32 length 4).
FLUSH_MSG = FLUSH + i_pack(4)
SYNC_MSG = SYNC + i_pack(4)
TERMINATE_MSG = TERMINATE + i_pack(4)
COPY_DONE_MSG = COPY_DONE + i_pack(4)

# DESCRIBE constants
STATEMENT = b('S')
PORTAL = b('P')

# ErrorResponse codes
RESPONSE_SEVERITY = b("S")  # always present
RESPONSE_CODE = b("C")  # always present
RESPONSE_MSG = b("M")  # always present
RESPONSE_DETAIL = b("D")
RESPONSE_HINT = b("H")
RESPONSE_POSITION = b("P")
RESPONSE__POSITION = b("p")
RESPONSE__QUERY = b("q")
RESPONSE_WHERE = b("W")
RESPONSE_FILE = b("F")
RESPONSE_LINE = b("L")
RESPONSE_ROUTINE = b("R")

# ReadyForQuery transaction-status indicators.
IDLE = b("I")
IDLE_IN_TRANSACTION = b("T")
IDLE_IN_FAILED_TRANSACTION = b("E")

# Translation table for array text parsing: '[' -> '{', ']' -> '}', and
# delete space, single-quote and 'u' characters.
arr_trans = dict(zip(map(ord, u("[] 'u")), list(u('{}')) + [None] * 3))
class MulticastDelegate(object):
    """A minimal event dispatcher.

    Register callbacks with ``+=`` (or :meth:`add`), unregister with
    ``-=``, and call the instance itself to invoke every registered
    callback with the given arguments, in registration order.
    """

    def __init__(self):
        self.delegates = []

    def __iadd__(self, delegate):
        self.add(delegate)
        return self

    def add(self, delegate):
        self.delegates.append(delegate)

    def __isub__(self, delegate):
        self.delegates.remove(delegate)
        return self

    def __call__(self, *args, **kwargs):
        for handler in self.delegates:
            handler(*args, **kwargs)
class Connection(object):
"""A connection object is returned by the :func:`pg8000.connect` function.
It represents a single physical connection to a PostgreSQL database.
.. attribute:: Connection.notifies
A list of server-side notifications received by this database
connection (via the LISTEN/NOTIFY PostgreSQL commands). Each list
element is a two-element tuple containing the PostgreSQL backend PID
that issued the notify, and the notification name.
PostgreSQL will only send notifications to a client between
transactions. The contents of this property are generally only
populated after a commit or rollback of the current transaction.
This list can be modified by a client application to clean out
notifications as they are handled. However, inspecting or modifying
this collection should only be done while holding the
:attr:`notifies_lock` lock in order to guarantee thread-safety.
This attribute is not part of the DBAPI standard; it is a pg8000
extension.
.. versionadded:: 1.07
.. attribute:: Connection.notifies_lock
A :class:`threading.Lock` object that should be held to read or
modify the contents of the :attr:`notifies` list.
This attribute is not part of the DBAPI standard; it is a pg8000
extension.
.. versionadded:: 1.07
.. attribute:: Connection.autocommit
Following the DB-API specification, autocommit is off by default.
It can be turned on by setting this boolean pg8000-specific autocommit
property to True.
.. versionadded:: 1.9
.. exception:: Connection.Error
Connection.Warning
Connection.InterfaceError
Connection.DatabaseError
Connection.InternalError
Connection.OperationalError
Connection.ProgrammingError
Connection.IntegrityError
Connection.DataError
Connection.NotSupportedError
All of the standard database exception types are accessible via
connection instances.
This is a DBAPI 2.0 extension. Accessing any of these attributes will
generate the warning ``DB-API extension connection.DatabaseError
used``.
"""
# DBAPI Extension: supply exceptions as attributes on the connection
Warning = property(lambda self: self._getError(Warning))
Error = property(lambda self: self._getError(Error))
InterfaceError = property(lambda self: self._getError(InterfaceError))
DatabaseError = property(lambda self: self._getError(DatabaseError))
OperationalError = property(lambda self: self._getError(OperationalError))
IntegrityError = property(lambda self: self._getError(IntegrityError))
InternalError = property(lambda self: self._getError(InternalError))
ProgrammingError = property(lambda self: self._getError(ProgrammingError))
NotSupportedError = property(
lambda self: self._getError(NotSupportedError))
# Determines the number of rows to read from the database server at once.
# Reading more rows increases performance at the cost of memory. The
# default value is 100 rows. The effect of this parameter is transparent.
# That is, the library reads more rows when the cache is empty
# automatically.
_row_cache_size = 100
_row_cache_size_bin = i_pack(_row_cache_size)
def _getError(self, error):
warn(
"DB-API extension connection.%s used" %
error.__name__, stacklevel=3)
return error
def __init__(
self, user, host, unix_sock, port, database, password, ssl,
timeout):
self._client_encoding = "utf8"
self._commands_with_count = (
b("INSERT"), b("DELETE"), b("UPDATE"), b("MOVE"),
b("FETCH"), b("COPY"), b("SELECT"))
self._lock = threading.Lock()
if user is None:
try:
self.user = os.environ['PGUSER']
except KeyError:
try:
self.user = os.environ['USER']
except KeyError:
raise InterfaceError(
"The 'user' connection parameter was omitted, and "
"neither the PGUSER or USER environment variables "
"were set.")
else:
self.user = user
if isinstance(self.user, text_type):
self.user = self.user.encode('utf8')
self.password = password
self.autocommit = False
self._xid = None
self._caches = defaultdict(lambda: defaultdict(dict))
self.statement_number = 0
self.portal_number = 0
try:
if unix_sock is None and host is not None:
self._usock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
elif unix_sock is not None:
if not hasattr(socket, "AF_UNIX"):
raise InterfaceError(
"attempt to connect to unix socket on unsupported "
"platform")
self._usock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
else:
raise ProgrammingError(
"one of host or unix_sock must be provided")
if not PY2 and timeout is not None:
self._usock.settimeout(timeout)
if unix_sock is None and host is not None:
self._usock.connect((host, port))
elif unix_sock is not None:
self._usock.connect(unix_sock)
if ssl:
try:
self._lock.acquire()
import ssl as sslmodule
# Int32(8) - Message length, including self.
# Int32(80877103) - The SSL request code.
self._usock.sendall(ii_pack(8, 80877103))
resp = self._usock.recv(1)
if resp == b('S'):
self._usock = sslmodule.wrap_socket(self._usock)
else:
raise InterfaceError("Server refuses SSL")
except ImportError:
raise InterfaceError(
"SSL required but ssl module not available in "
"this python installation")
finally:
self._lock.release()
self._sock = self._usock.makefile(mode="rwb")
except socket.error:
self._usock.close()
raise InterfaceError("communication error", exc_info()[1])
self._flush = self._sock.flush
self._read = self._sock.read
if PRE_26:
self._write = self._sock.writelines
else:
self._write = self._sock.write
self._backend_key_data = None
##
# An event handler that is fired when the database server issues a
# notice.
# The value of this property is a MulticastDelegate. A callback
# can be added by using connection.NotificationReceived += SomeMethod.
# The method will be called with a single argument, an object that has
# properties: severity, code, msg, and possibly others (detail, hint,
# position, where, file, line, and routine). Callbacks can be removed
# with the -= operator.
#
# Stability: Added in v1.03, stability guaranteed for v1.xx.
self.NoticeReceived = MulticastDelegate()
##
# An event handler that is fired when a runtime configuration option is
# changed on the server. The value of this property is a
# MulticastDelegate. A callback can be added by using
# connection.NotificationReceived += SomeMethod. Callbacks can be
# removed with the -= operator. The method will be called with a single
# argument, an object that has properties "key" and "value".
#
# Stability: Added in v1.03, stability guaranteed for v1.xx.
self.ParameterStatusReceived = MulticastDelegate()
##
# An event handler that is fired when NOTIFY occurs for a notification
# that has been LISTEN'd for. The value of this property is a
# MulticastDelegate. A callback can be added by using
# connection.NotificationReceived += SomeMethod. The method will be
# called with a single argument, an object that has properties:
# backend_pid, condition, and additional_info. Callbacks can be
# removed with the -= operator.
#
# Stability: Added in v1.03, stability guaranteed for v1.xx.
self.NotificationReceived = MulticastDelegate()
self.ParameterStatusReceived += self.handle_PARAMETER_STATUS
def text_out(v):
return v.encode(self._client_encoding)
def time_out(v):
return v.isoformat().encode(self._client_encoding)
def date_out(v):
if v == datetime.date.max:
return 'infinity'.encode(self._client_encoding)
elif v == datetime.date.min:
return '-infinity'.encode(self._client_encoding)
else:
return v.isoformat().encode(self._client_encoding)
def unknown_out(v):
return str(v).encode(self._client_encoding)
trans_tab = dict(zip(map(ord, u('{}')), u('[]')))
glbls = {'Decimal': Decimal}
def array_in(data, idx, length):
arr = []
prev_c = None
for c in data[idx:idx+length].decode(
self._client_encoding).translate(
trans_tab).replace(u('NULL'), u('None')):
if c not in ('[', ']', ',', 'N') and prev_c in ('[', ','):
arr.extend("Decimal('")
elif c in (']', ',') and prev_c not in ('[', ']', ',', 'e'):
arr.extend("')")
arr.append(c)
prev_c = c
return eval(''.join(arr), glbls)
def array_recv(data, idx, length):
final_idx = idx + length
dim, hasnull, typeoid = iii_unpack(data, idx)
idx += 12
# get type conversion method for typeoid
conversion = self.pg_types[typeoid][1]
# Read dimension info
dim_lengths = []
for i in range(dim):
dim_lengths.append(ii_unpack(data, idx)[0])
idx += 8
# Read all array values
values = []
while idx < final_idx:
element_len, = i_unpack(data, idx)
idx += 4
if element_len == -1:
values.append(None)
else:
values.append(conversion(data, idx, element_len))
idx += element_len
# at this point, {{1,2,3},{4,5,6}}::int[][] looks like
# [1,2,3,4,5,6]. go through the dimensions and fix up the array
# contents to match expected dimensions
for length in reversed(dim_lengths[1:]):
values = list(map(list, zip(*[iter(values)] * length)))
return values
        def vector_in(data, idx, length):
            # int2vector arrives as space-separated text, e.g. '1 2 3';
            # rewrite to '[1,2,3]' and eval.  NOTE(review): eval() on
            # server-supplied text — assumes a trusted server.
            return eval('[' + data[idx:idx+length].decode(
                self._client_encoding).replace(' ', ',') + ']')
        # Text/bool/json receivers differ between Python 2 and 3 because
        # indexing bytes yields a str on PY2 but an int on PY3.
        if PY2:
            def text_recv(data, offset, length):
                return unicode( # noqa
                    data[offset: offset + length], self._client_encoding)
            def bool_recv(d, o, l):
                return d[o] == "\x01"
            def json_in(data, offset, length):
                return loads(unicode( # noqa
                    data[offset: offset + length], self._client_encoding))
        else:
            def text_recv(data, offset, length):
                return str(
                    data[offset: offset + length], self._client_encoding)
            def bool_recv(data, offset, length):
                return data[offset] == 1
            def json_in(data, offset, length):
                return loads(
                    str(data[offset: offset + length], self._client_encoding))
        def time_in(data, offset, length):
            # Parse text-format time 'HH:MM:SS[.ffffff]' into datetime.time.
            hour = int(data[offset:offset + 2])
            minute = int(data[offset + 3:offset + 5])
            # Seconds kept as Decimal so the fractional part survives
            # exactly until converted to microseconds below.
            sec = Decimal(
                data[offset + 6:offset + length].decode(self._client_encoding))
            return datetime.time(
                hour, minute, int(sec), int((sec - int(sec)) * 1000000))
        def date_in(data, offset, length):
            # Parse text-format date 'YYYY-MM-DD' into datetime.date.
            # PostgreSQL's special 'infinity'/'-infinity' dates map to
            # date.max/date.min (the inverse of date_out above); they are
            # detected from the first four characters.
            year_str = data[offset:offset + 4].decode(self._client_encoding)
            if year_str == 'infi':
                return datetime.date.max
            elif year_str == '-inf':
                return datetime.date.min
            else:
                return datetime.date(
                    int(year_str), int(data[offset + 5:offset + 7]),
                    int(data[offset + 8:offset + 10]))
        def numeric_in(data, offset, length):
            # NUMERIC arrives as text; Decimal preserves exact precision.
            return Decimal(
                data[offset: offset + length].decode(self._client_encoding))
        def numeric_out(d):
            # Send Decimal as its exact text representation.
            return str(d).encode(self._client_encoding)
self.pg_types = defaultdict(
lambda: (FC_TEXT, text_recv), {
16: (FC_BINARY, bool_recv), # boolean
17: (FC_BINARY, bytea_recv), # bytea
19: (FC_BINARY, text_recv), # name type
20: (FC_BINARY, int8_recv), # int8
21: (FC_BINARY, int2_recv), # int2
22: (FC_TEXT, vector_in), # int2vector
23: (FC_BINARY, int4_recv), # int4
25: (FC_BINARY, text_recv), # TEXT type
26: (FC_TEXT, int_in), # oid
28: (FC_TEXT, int_in), # xid
114: (FC_TEXT, json_in), # json
700: (FC_BINARY, float4_recv), # float4
701: (FC_BINARY, float8_recv), # float8
705: (FC_BINARY, text_recv), # unknown
829: (FC_TEXT, text_recv), # MACADDR type
1000: (FC_BINARY, array_recv), # BOOL[]
1003: (FC_BINARY, array_recv), # NAME[]
1005: (FC_BINARY, array_recv), # INT2[]
1007: (FC_BINARY, array_recv), # INT4[]
1009: (FC_BINARY, array_recv), # TEXT[]
1014: (FC_BINARY, array_recv), # CHAR[]
1015: (FC_BINARY, array_recv), # VARCHAR[]
1016: (FC_BINARY, array_recv), # INT8[]
1021: (FC_BINARY, array_recv), # FLOAT4[]
1022: (FC_BINARY, array_recv), # FLOAT8[]
1042: (FC_BINARY, text_recv), # CHAR type
1043: (FC_BINARY, text_recv), # VARCHAR type
1082: (FC_TEXT, date_in), # date
1083: (FC_TEXT, time_in),
1114: (FC_BINARY, timestamp_recv_float), # timestamp w/ tz
1184: (FC_BINARY, timestamptz_recv_float),
1186: (FC_BINARY, interval_recv_integer),
1231: (FC_TEXT, array_in), # NUMERIC[]
1263: (FC_BINARY, array_recv), # cstring[]
1700: (FC_TEXT, numeric_in), # NUMERIC
2275: (FC_BINARY, text_recv), # cstring
2950: (FC_BINARY, uuid_recv), # uuid
3802: (FC_TEXT, json_in), # jsonb
})
self.py_types = {
type(None): (-1, FC_BINARY, null_send), # null
bool: (16, FC_BINARY, bool_send),
int: (705, FC_TEXT, unknown_out),
float: (701, FC_BINARY, d_pack), # float8
str: (705, FC_TEXT, text_out), # unknown
datetime.date: (1082, FC_TEXT, date_out), # date
datetime.time: (1083, FC_TEXT, time_out), # time
1114: (1114, FC_BINARY, timestamp_send_integer), # timestamp
# timestamp w/ tz
1184: (1184, FC_BINARY, timestamptz_send_integer),
datetime.timedelta: (1186, FC_BINARY, interval_send_integer),
Interval: (1186, FC_BINARY, interval_send_integer),
Decimal: (1700, FC_TEXT, numeric_out), # Decimal
UUID: (2950, FC_BINARY, uuid_send), # uuid
}
self.inspect_funcs = {
datetime.datetime: self.inspect_datetime,
list: self.array_inspect,
tuple: self.array_inspect,
}
if PY2:
self.py_types[Bytea] = (17, FC_BINARY, bytea_send) # bytea
self.py_types[text_type] = (705, FC_TEXT, text_out) # unknown
self.py_types[long] = (705, FC_TEXT, unknown_out) # noqa
else:
self.py_types[bytes] = (17, FC_BINARY, bytea_send) # bytea
try:
from ipaddress import (
ip_address, IPv4Address, IPv6Address, ip_network, IPv4Network,
IPv6Network)
def inet_out(v):
return str(v).encode(self._client_encoding)
def inet_in(data, offset, length):
inet_str = data[offset: offset + length].decode(
self._client_encoding)
if '/' in inet_str:
return ip_network(inet_str, False)
else:
return ip_address(inet_str)
self.py_types[IPv4Address] = (869, FC_TEXT, inet_out) # inet
self.py_types[IPv6Address] = (869, FC_TEXT, inet_out) # inet
self.py_types[IPv4Network] = (869, FC_TEXT, inet_out) # inet
self.py_types[IPv6Network] = (869, FC_TEXT, inet_out) # inet
self.pg_types[869] = (FC_TEXT, inet_in) # inet
except ImportError:
pass
self.message_types = {
NOTICE_RESPONSE: self.handle_NOTICE_RESPONSE,
AUTHENTICATION_REQUEST: self.handle_AUTHENTICATION_REQUEST,
PARAMETER_STATUS: self.handle_PARAMETER_STATUS,
BACKEND_KEY_DATA: self.handle_BACKEND_KEY_DATA,
READY_FOR_QUERY: self.handle_READY_FOR_QUERY,
ROW_DESCRIPTION: self.handle_ROW_DESCRIPTION,
ERROR_RESPONSE: self.handle_ERROR_RESPONSE,
EMPTY_QUERY_RESPONSE: self.handle_EMPTY_QUERY_RESPONSE,
DATA_ROW: self.handle_DATA_ROW,
COMMAND_COMPLETE: self.handle_COMMAND_COMPLETE,
PARSE_COMPLETE: self.handle_PARSE_COMPLETE,
BIND_COMPLETE: self.handle_BIND_COMPLETE,
CLOSE_COMPLETE: self.handle_CLOSE_COMPLETE,
PORTAL_SUSPENDED: self.handle_PORTAL_SUSPENDED,
NO_DATA: self.handle_NO_DATA,
PARAMETER_DESCRIPTION: self.handle_PARAMETER_DESCRIPTION,
NOTIFICATION_RESPONSE: self.handle_NOTIFICATION_RESPONSE,
COPY_DONE: self.handle_COPY_DONE,
COPY_DATA: self.handle_COPY_DATA,
COPY_IN_RESPONSE: self.handle_COPY_IN_RESPONSE,
COPY_OUT_RESPONSE: self.handle_COPY_OUT_RESPONSE}
# Int32 - Message length, including self.
# Int32(196608) - Protocol version number. Version 3.0.
# Any number of key/value pairs, terminated by a zero byte:
# String - A parameter name (user, database, or options)
# String - Parameter value
protocol = 196608
val = bytearray(
i_pack(protocol) + b("user\x00") + self.user + NULL_BYTE)
if database is not None:
if isinstance(database, text_type):
database = database.encode('utf8')
val.extend(b("database\x00") + database + NULL_BYTE)
val.append(0)
self._write(i_pack(len(val) + 4))
self._write(val)
self._flush()
self._cursor = self.cursor()
try:
self._lock.acquire()
code = self.error = None
while code not in (READY_FOR_QUERY, ERROR_RESPONSE):
code, data_len = ci_unpack(self._read(5))
self.message_types[code](self._read(data_len - 4), None)
if self.error is not None:
raise self.error
except:
self._close()
raise
finally:
self._lock.release()
self.in_transaction = False
self.notifies = []
self.notifies_lock = threading.Lock()
    def handle_ERROR_RESPONSE(self, data, ps):
        # ErrorResponse: NULL-separated fields, each a one-byte field-type
        # code followed by its value.  The error is stored on self.error
        # and raised later by handle_messages()/connect once the message
        # stream reaches a safe point.
        responses = tuple(
            (s[0:1], s[1:].decode(self._client_encoding)) for s in
            data.split(NULL_BYTE))
        msg_dict = dict(responses)
        # SQLSTATE 28000: invalid authorization specification.
        if msg_dict[RESPONSE_CODE] == "28000":
            self.error = InterfaceError("md5 password authentication failed")
        else:
            self.error = ProgrammingError(*tuple(v for k, v in responses))
    def handle_EMPTY_QUERY_RESPONSE(self, data, ps):
        # The server reports the submitted query string was empty.
        self.error = ProgrammingError("query was empty")
    def handle_CLOSE_COMPLETE(self, data, ps):
        # Acknowledgement of a Close message; nothing to do.
        pass
    def handle_PARSE_COMPLETE(self, data, ps):
        # Acknowledgement of a Parse message; nothing to do.
        # Byte1('1') - Identifier.
        # Int32(4) - Message length, including self.
        pass
    def handle_BIND_COMPLETE(self, data, ps):
        # Acknowledgement of a Bind message; nothing to do.
        pass
    def handle_PORTAL_SUSPENDED(self, data, cursor):
        # Execute hit its row-count limit before exhausting the portal;
        # more rows remain to be fetched (see execute()).
        cursor.portal_suspended = True
    def handle_PARAMETER_DESCRIPTION(self, data, ps):
        # Describes the parameter types the server inferred; ignored.
        # Well, we don't really care -- we're going to send whatever we
        # want and let the database deal with it. But thanks anyways!
        # count = h_unpack(data)[0]
        # type_oids = unpack_from("!" + "i" * count, data, 2)
        pass
    def handle_COPY_DONE(self, data, ps):
        # End of a COPY data stream from the server.
        self._copy_done = True
    def handle_COPY_OUT_RESPONSE(self, data, ps):
        # Server is about to stream COPY ... TO STDOUT data; subsequent
        # COPY_DATA messages are written to ps.stream by handle_COPY_DATA.
        # Int8(1) - 0 textual, 1 binary
        # Int16(2) - Number of columns
        # Int16(N) - Format codes for each column (0 text, 1 binary)
        is_binary, num_cols = bh_unpack(data)
        # column_formats = unpack_from('!' + 'h' * num_cols, data, 3)
        if ps.stream is None:
            raise InterfaceError(
                "An output stream is required for the COPY OUT response.")
    def handle_COPY_DATA(self, data, ps):
        # One chunk of COPY OUT data; forward it to the caller's stream.
        ps.stream.write(data)
    def handle_COPY_IN_RESPONSE(self, data, ps):
        # Server expects COPY ... FROM STDIN data: stream ps.stream to the
        # server in 8 KiB CopyData chunks, then send CopyDone + Sync.
        # Int16(2) - Number of columns
        # Int16(N) - Format codes for each column (0 text, 1 binary)
        is_binary, num_cols = bh_unpack(data)
        # column_formats = unpack_from('!' + 'h' * num_cols, data, 3)
        assert self._lock.locked()
        if ps.stream is None:
            raise InterfaceError(
                "An input stream is required for the COPY IN response.")
        if PY2:
            # PY2: plain read() loop.
            while True:
                data = ps.stream.read(8192)
                if not data:
                    break
                self._write(COPY_DATA + i_pack(len(data) + 4))
                self._write(data)
                self._flush()
        else:
            # PY3: readinto() a reusable buffer to avoid per-chunk
            # allocations.
            bffr = bytearray(8192)
            while True:
                bytes_read = ps.stream.readinto(bffr)
                if bytes_read == 0:
                    break
                self._write(COPY_DATA + i_pack(bytes_read + 4))
                self._write(bffr[:bytes_read])
                self._flush()
        # Send CopyDone
        # Byte1('c') - Identifier.
        # Int32(4) - Message length, including self.
        self._write(COPY_DONE_MSG)
        self._write(SYNC_MSG)
        self._flush()
    def handle_NOTIFICATION_RESPONSE(self, data, ps):
        # Asynchronous NOTIFY payload: Int32 backend pid, then the
        # NULL-terminated condition (channel) name, then additional info.
        self.NotificationReceived(data)
        ##
        # A message sent if this connection receives a NOTIFY that it was
        # LISTENing for.
        #
        # Stability: Added in pg8000 v1.03. When limited to accessing
        # properties from a notification event dispatch, stability is
        # guaranteed for v1.xx.
        backend_pid = i_unpack(data)[0]
        idx = 4
        null = data.find(NULL_BYTE, idx) - idx
        condition = data[idx:idx + null].decode("ascii")
        idx += null + 1
        null = data.find(NULL_BYTE, idx) - idx
        # additional_info = data[idx:idx + null]
        # psycopg2 compatible notification interface
        try:
            self.notifies_lock.acquire()
            self.notifies.append((backend_pid, condition))
        finally:
            self.notifies_lock.release()
    def cursor(self):
        """Creates a :class:`Cursor` object bound to this
        connection.
        This function is part of the `DBAPI 2.0 specification
        `_.

        :rtype: :class:`Cursor`
        """
        return Cursor(self)
def commit(self):
"""Commits the current database transaction.
This function is part of the `DBAPI 2.0 specification
`_.
"""
try:
self._lock.acquire()
self.execute(self._cursor, "commit", None)
finally:
self._lock.release()
def rollback(self):
"""Rolls back the current database transaction.
This function is part of the `DBAPI 2.0 specification
`_.
"""
try:
self._lock.acquire()
self.execute(self._cursor, "rollback", None)
finally:
self._lock.release()
    def _close(self):
        # Terminate the session and tear down both socket layers.
        # self._sock is the buffered file-like wrapper; self._usock is the
        # underlying socket, closed unconditionally in the finally block.
        try:
            # Byte1('X') - Identifies the message as a terminate message.
            # Int32(4) - Message length, including self.
            self._write(TERMINATE_MSG)
            self._flush()
            self._sock.close()
        except AttributeError:
            # self._sock is already None: connection previously closed.
            raise InterfaceError("connection is closed")
        except ValueError:
            # Write to a closed file object.
            raise InterfaceError("connection is closed")
        except socket.error:
            raise OperationalError(str(exc_info()[1]))
        finally:
            self._usock.close()
            self._sock = None
def close(self):
"""Closes the database connection.
This function is part of the `DBAPI 2.0 specification
`_.
"""
try:
self._lock.acquire()
self._close()
finally:
self._lock.release()
    def handle_AUTHENTICATION_REQUEST(self, data, cursor):
        # Respond to the server's authentication challenge.  Only Ok (0),
        # cleartext (3) and MD5 (5) are implemented.
        assert self._lock.locked()
        # Int32 - An authentication code that represents different
        # authentication messages:
        # 0 = AuthenticationOk
        # 5 = MD5 pwd
        # 2 = Kerberos v5 (not supported by pg8000)
        # 3 = Cleartext pwd
        # 4 = crypt() pwd (not supported by pg8000)
        # 6 = SCM credential (not supported by pg8000)
        # 7 = GSSAPI (not supported by pg8000)
        # 8 = GSSAPI data (not supported by pg8000)
        # 9 = SSPI (not supported by pg8000)
        # Some authentication messages have additional data following the
        # authentication code. That data is documented in the appropriate
        # class.
        auth_code = i_unpack(data)[0]
        if auth_code == 0:
            # AuthenticationOk: nothing further required.
            pass
        elif auth_code == 3:
            # Cleartext password request.
            if self.password is None:
                raise InterfaceError(
                    "server requesting password authentication, but no "
                    "password was provided")
            self._send_message(
                PASSWORD, self.password.encode("ascii") + NULL_BYTE)
            self._flush()
        elif auth_code == 5:
            ##
            # A message representing the backend requesting an MD5 hashed
            # password response. The response will be sent as
            # md5(md5(pwd + login) + salt).
            # Additional message data:
            # Byte4 - Hash salt.
            salt = b("").join(cccc_unpack(data, 4))
            if self.password is None:
                raise InterfaceError(
                    "server requesting MD5 password authentication, but no "
                    "password was provided")
            pwd = b("md5") + md5(
                md5(self.password.encode("ascii") + self.user).
                hexdigest().encode("ascii") + salt).hexdigest().encode("ascii")
            # Byte1('p') - Identifies the message as a password message.
            # Int32 - Message length including self.
            # String - The password. Password may be encrypted.
            self._send_message(PASSWORD, pwd + NULL_BYTE)
            self._flush()
        elif auth_code in (2, 4, 6, 7, 8, 9):
            raise InterfaceError(
                "Authentication method " + str(auth_code) +
                " not supported by pg8000.")
        else:
            raise InterfaceError(
                "Authentication method " + str(auth_code) +
                " not recognized by pg8000.")
    def handle_READY_FOR_QUERY(self, data, ps):
        # Byte1 - Status indicator.  Anything other than IDLE means a
        # transaction (possibly failed) is open.
        self.in_transaction = data != IDLE
    def handle_BACKEND_KEY_DATA(self, data, ps):
        # Save the cancellation key data (backend pid + secret key).
        self._backend_key_data = data
def inspect_datetime(self, value):
if value.tzinfo is None:
return self.py_types[1114] # timestamp
else:
return self.py_types[1184] # send as timestamptz
def make_params(self, values):
params = []
for value in values:
typ = type(value)
try:
params.append(self.py_types[typ])
except KeyError:
try:
params.append(self.inspect_funcs[typ](value))
except KeyError:
raise NotSupportedError(
"type " + str(exc_info()[1]) +
"not mapped to pg type")
return params
    def handle_ROW_DESCRIPTION(self, data, cursor):
        # Int16 field count, then per field: NULL-terminated name followed
        # by six fixed-width values (18 bytes: i,h,i,h,i,h).  Fills
        # cursor.ps['row_desc'] and attaches each column's receive
        # function from pg_types.
        count = h_unpack(data)[0]
        idx = 2
        for i in range(count):
            name = data[idx:data.find(NULL_BYTE, idx)]
            idx += len(name) + 1
            field = dict(
                zip((
                    "table_oid", "column_attrnum", "type_oid", "type_size",
                    "type_modifier", "format"), ihihih_unpack(data, idx)))
            field['name'] = name
            idx += 18
            cursor.ps['row_desc'].append(field)
            field['pg8000_fc'], field['func'] = \
                self.pg_types[field['type_oid']]
    def execute(self, cursor, operation, vals):
        # Run *operation* with parameters *vals* using the extended-query
        # protocol.  Statements are converted once per paramstyle and
        # cached; prepared statements are cached per
        # (param-oid-tuple, operation) key, so the Parse/Describe round
        # trip only happens on a cache miss.  Then Bind/Execute/Sync runs
        # the query and handle_messages collects the results.
        if vals is None:
            vals = ()
        from . import paramstyle
        cache = self._caches[paramstyle]
        # Convert the query from the caller's paramstyle to PG's $n form
        # (cached per operation string).
        try:
            statement, make_args = cache['statement'][operation]
        except KeyError:
            statement, make_args = convert_paramstyle(paramstyle, operation)
            cache['statement'][operation] = statement, make_args
        args = make_args(vals)
        params = self.make_params(args)
        key = tuple(oid for oid, x, y in params), operation
        try:
            ps = cache['ps'][key]
            cursor.ps = ps
        except KeyError:
            # Cache miss: prepare a new named statement.
            statement_name = "pg8000_statement_" + str(self.statement_number)
            self.statement_number += 1
            statement_name_bin = statement_name.encode('ascii') + NULL_BYTE
            ps = {
                'row_desc': [],
                'param_funcs': tuple(x[2] for x in params),
            }
            cursor.ps = ps
            param_fcs = tuple(x[1] for x in params)
            # Byte1('P') - Identifies the message as a Parse command.
            # Int32 - Message length, including self.
            # String - Prepared statement name. An empty string selects the
            # unnamed prepared statement.
            # String - The query string.
            # Int16 - Number of parameter data types specified (can be zero).
            # For each parameter:
            # Int32 - The OID of the parameter data type.
            val = bytearray(statement_name_bin)
            val.extend(statement.encode(self._client_encoding) + NULL_BYTE)
            val.extend(h_pack(len(params)))
            for oid, fc, send_func in params:
                # Parse message doesn't seem to handle the -1 type_oid for NULL
                # values that other messages handle. So we'll provide type_oid
                # 705, the PG "unknown" type.
                val.extend(i_pack(705 if oid == -1 else oid))
            # Byte1('D') - Identifies the message as a describe command.
            # Int32 - Message length, including self.
            # Byte1 - 'S' for prepared statement, 'P' for portal.
            # String - The name of the item to describe.
            self._send_message(PARSE, val)
            self._send_message(DESCRIBE, STATEMENT + statement_name_bin)
            self._write(SYNC_MSG)
            try:
                self._flush()
            except AttributeError:
                if self._sock is None:
                    raise InterfaceError("connection is closed")
                else:
                    raise exc_info()[1]
            self.handle_messages(cursor)
            # We've got row_desc that allows us to identify what we're
            # going to get back from this statement.
            output_fc = tuple(
                self.pg_types[f['type_oid']][0] for f in ps['row_desc'])
            ps['input_funcs'] = tuple(f['func'] for f in ps['row_desc'])
            # Byte1('B') - Identifies the Bind command.
            # Int32 - Message length, including self.
            # String - Name of the destination portal.
            # String - Name of the source prepared statement.
            # Int16 - Number of parameter format codes.
            # For each parameter format code:
            #   Int16 - The parameter format code.
            # Int16 - Number of parameter values.
            # For each parameter value:
            #   Int32 - The length of the parameter value, in bytes, not
            #           including this length. -1 indicates a NULL parameter
            #           value, in which no value bytes follow.
            #   Byte[n] - Value of the parameter.
            # Int16 - The number of result-column format codes.
            # For each result-column format code:
            #   Int16 - The format code.
            ps['bind_1'] = statement_name_bin + h_pack(len(params)) + \
                pack("!" + "h" * len(param_fcs), *param_fcs) + \
                h_pack(len(params))
            ps['bind_2'] = h_pack(len(output_fc)) + \
                pack("!" + "h" * len(output_fc), *output_fc)
            cache['ps'][key] = ps
        cursor._cached_rows.clear()
        cursor._row_count = -1
        # Each execution gets a fresh named portal.
        cursor.portal_name = "pg8000_portal_" + str(self.portal_number)
        self.portal_number += 1
        cursor.portal_name_bin = cursor.portal_name.encode('ascii') + NULL_BYTE
        cursor.execute_msg = cursor.portal_name_bin + \
            Connection._row_cache_size_bin
        # Byte1('B') - Identifies the Bind command.
        # Int32 - Message length, including self.
        # String - Name of the destination portal.
        # String - Name of the source prepared statement.
        # Int16 - Number of parameter format codes.
        # For each parameter format code:
        #   Int16 - The parameter format code.
        # Int16 - Number of parameter values.
        # For each parameter value:
        #   Int32 - The length of the parameter value, in bytes, not
        #           including this length. -1 indicates a NULL parameter
        #           value, in which no value bytes follow.
        #   Byte[n] - Value of the parameter.
        # Int16 - The number of result-column format codes.
        # For each result-column format code:
        #   Int16 - The format code.
        retval = bytearray(cursor.portal_name_bin + ps['bind_1'])
        for value, send_func in zip(args, ps['param_funcs']):
            if value is None:
                val = NULL
            else:
                val = send_func(value)
                retval.extend(i_pack(len(val)))
                retval.extend(val)
        retval.extend(ps['bind_2'])
        self._send_message(BIND, retval)
        self.send_EXECUTE(cursor)
        self._write(SYNC_MSG)
        self._flush()
        self.handle_messages(cursor)
        if cursor.portal_suspended:
            if self.autocommit:
                raise InterfaceError(
                    "With autocommit on, it's not possible to retrieve more "
                    "rows than the pg8000 cache size, as the portal is closed "
                    "when the transaction is closed.")
            else:
                self.close_portal(cursor)
    def _send_message(self, code, data):
        # Frame and send one protocol message: one-byte code, Int32
        # length (including itself), payload, then a Flush message so the
        # server responds without waiting for Sync.
        try:
            self._write(code)
            self._write(i_pack(len(data) + 4))
            self._write(data)
            self._write(FLUSH_MSG)
        except ValueError:
            if str(exc_info()[1]) == "write to closed file":
                raise InterfaceError("connection is closed")
            else:
                raise exc_info()[1]
        except AttributeError:
            raise InterfaceError("connection is closed")
    def send_EXECUTE(self, cursor):
        # Execute the cursor's bound portal (cursor.execute_msg carries
        # the portal name and the row-cache-size limit).
        # Byte1('E') - Identifies the message as an execute message.
        # Int32 - Message length, including self.
        # String - The name of the portal to execute.
        # Int32 - Maximum number of rows to return, if portal
        # contains a query # that returns rows.
        # 0 = no limit.
        cursor.portal_suspended = False
        self._send_message(EXECUTE, cursor.execute_msg)
    def handle_NO_DATA(self, msg, ps):
        # The described statement returns no rows; nothing to do.
        pass
    def handle_COMMAND_COMPLETE(self, data, cursor):
        # Command tag, e.g. b'UPDATE 3\x00': extract the affected-row
        # count for commands that report one, and invalidate all cached
        # prepared statements after DDL (types/tables may have changed).
        values = data[:-1].split(BINARY_SPACE)
        command = values[0]
        if command in self._commands_with_count:
            row_count = int(values[-1])
            if cursor._row_count == -1:
                cursor._row_count = row_count
            else:
                # Accumulate across multiple completions (e.g. executemany).
                cursor._row_count += row_count
        if command in DDL_COMMANDS:
            for k in self._caches:
                self._caches[k]['ps'].clear()
    def handle_DATA_ROW(self, data, cursor):
        # One result row: Int16 column count (skipped - the prepared
        # statement's input_funcs already fix the column layout), then per
        # column an Int32 length (-1 = NULL) and the value bytes.
        data_idx = 2
        row = []
        for func in cursor.ps['input_funcs']:
            vlen = i_unpack(data, data_idx)[0]
            data_idx += 4
            if vlen == -1:
                row.append(None)
            else:
                row.append(func(data, data_idx, vlen))
                data_idx += vlen
        cursor._cached_rows.append(row)
    def handle_messages(self, cursor):
        # Pump backend messages (5-byte header: code + Int32 length) and
        # dispatch each through self.message_types until ReadyForQuery.
        # Any error accumulated by a handler is raised afterwards, so the
        # stream is always consumed to a safe point first.
        code = self.error = None
        try:
            while code != READY_FOR_QUERY:
                code, data_len = ci_unpack(self._read(5))
                self.message_types[code](self._read(data_len - 4), cursor)
        except:
            # Bare except is deliberate: any failure (including
            # KeyboardInterrupt) leaves the protocol stream in an unknown
            # state, so close the connection and re-raise.
            self._close()
            raise
        if self.error is not None:
            raise self.error
    # Byte1('C') - Identifies the message as a close command.
    # Int32 - Message length, including self.
    # Byte1 - 'S' for prepared statement, 'P' for portal.
    # String - The name of the item to close.
    def close_portal(self, cursor):
        # Release the cursor's server-side portal and wait for completion.
        self._send_message(CLOSE, PORTAL + cursor.portal_name_bin)
        self._write(SYNC_MSG)
        self._flush()
        self.handle_messages(cursor)
    # Byte1('N') - Identifier
    # Int32 - Message length
    # Any number of these, followed by a zero byte:
    # Byte1 - code identifying the field type (see responseKeys)
    # String - field value
    def handle_NOTICE_RESPONSE(self, data, ps):
        # Non-fatal server notice; parse the field-code/value pairs and
        # hand them to the NoticeReceived delegate.
        resp = dict((s[0:1], s[1:]) for s in data.split(NULL_BYTE))
        self.NoticeReceived(resp)
    def handle_PARAMETER_STATUS(self, data, ps):
        # Server run-time parameter report: NULL-terminated key then
        # NULL-terminated value.  Three keys matter here:
        # client_encoding, integer_datetimes (selects the int64 vs
        # float8 wire format for timestamps/intervals) and
        # server_version (selects which command tags carry row counts).
        pos = data.find(NULL_BYTE)
        key, value = data[:pos], data[pos + 1:-1]
        if key == b("client_encoding"):
            encoding = value.decode("ascii").lower()
            self._client_encoding = pg_to_py_encodings.get(encoding, encoding)
        elif key == b("integer_datetimes"):
            if value == b('on'):
                # int64-microsecond wire format.
                self.py_types[1114] = (1114, FC_BINARY, timestamp_send_integer)
                self.pg_types[1114] = (FC_BINARY, timestamp_recv_integer)
                self.py_types[1184] = (
                    1184, FC_BINARY, timestamptz_send_integer)
                self.pg_types[1184] = (FC_BINARY, timestamptz_recv_integer)
                self.py_types[Interval] = (
                    1186, FC_BINARY, interval_send_integer)
                self.py_types[datetime.timedelta] = (
                    1186, FC_BINARY, interval_send_integer)
                self.pg_types[1186] = (FC_BINARY, interval_recv_integer)
            else:
                # float8-second wire format (older servers).
                self.py_types[1114] = (1114, FC_BINARY, timestamp_send_float)
                self.pg_types[1114] = (FC_BINARY, timestamp_recv_float)
                self.py_types[1184] = (1184, FC_BINARY, timestamptz_send_float)
                self.pg_types[1184] = (FC_BINARY, timestamptz_recv_float)
                self.py_types[Interval] = (
                    1186, FC_BINARY, interval_send_float)
                self.py_types[datetime.timedelta] = (
                    1186, FC_BINARY, interval_send_float)
                self.pg_types[1186] = (FC_BINARY, interval_recv_float)
        elif key == b("server_version"):
            self._server_version = LooseVersion(value.decode('ascii'))
            if self._server_version < LooseVersion('8.2.0'):
                self._commands_with_count = (
                    b("INSERT"), b("DELETE"), b("UPDATE"), b("MOVE"),
                    b("FETCH"))
            elif self._server_version < LooseVersion('9.0.0'):
                self._commands_with_count = (
                    b("INSERT"), b("DELETE"), b("UPDATE"), b("MOVE"),
                    b("FETCH"), b("COPY"))
def array_inspect(self, value):
# Check if array has any values. If not, we can't determine the proper
# array oid.
first_element = array_find_first_element(value)
if first_element is None:
raise ArrayContentEmptyError("array has no values")
# supported array output
typ = type(first_element)
if issubclass(typ, integer_types):
# special int array support -- send as smallest possible array type
typ = integer_types
int2_ok, int4_ok, int8_ok = True, True, True
for v in array_flatten(value):
if v is None:
continue
if min_int2 < v < max_int2:
continue
int2_ok = False
if min_int4 < v < max_int4:
continue
int4_ok = False
if min_int8 < v < max_int8:
continue
int8_ok = False
if int2_ok:
array_oid = 1005 # INT2[]
oid, fc, send_func = (21, FC_BINARY, h_pack)
elif int4_ok:
array_oid = 1007 # INT4[]
oid, fc, send_func = (23, FC_BINARY, i_pack)
elif int8_ok:
array_oid = 1016 # INT8[]
oid, fc, send_func = (20, FC_BINARY, q_pack)
else:
raise ArrayContentNotSupportedError(
"numeric not supported as array contents")
else:
try:
oid, fc, send_func = self.make_params((first_element,))[0]
# If unknown, assume it's a string array
if oid == 705:
oid = 25
# Use binary ARRAY format to avoid having to properly
# escape text in the array literals
fc = FC_BINARY
array_oid = pg_array_types[oid]
except KeyError:
raise ArrayContentNotSupportedError(
"oid " + str(oid) + " not supported as array contents")
except NotSupportedError:
raise ArrayContentNotSupportedError(
"type " + str(typ) + " not supported as array contents")
if fc == FC_BINARY:
def send_array(arr):
# check for homogenous array
for a, i, v in walk_array(arr):
if not isinstance(v, (typ, type(None))):
raise ArrayContentNotHomogenousError(
"not all array elements are of type " + str(typ))
# check that all array dimensions are consistent
array_check_dimensions(arr)
has_null = array_has_null(arr)
dim_lengths = array_dim_lengths(arr)
data = bytearray(iii_pack(len(dim_lengths), has_null, oid))
for i in dim_lengths:
data.extend(ii_pack(i, 1))
for v in array_flatten(arr):
if v is None:
data += i_pack(-1)
else:
inner_data = send_func(v)
data += i_pack(len(inner_data))
data += inner_data
return data
else:
def send_array(arr):
for a, i, v in walk_array(arr):
if not isinstance(v, (typ, type(None))):
raise ArrayContentNotHomogenousError(
"not all array elements are of type " + str(typ))
array_check_dimensions(arr)
ar = deepcopy(arr)
for a, i, v in walk_array(ar):
if v is None:
a[i] = 'NULL'
else:
a[i] = send_func(v).decode('ascii')
return u(str(ar)).translate(arr_trans).encode('ascii')
return (array_oid, fc, send_array)
def xid(self, format_id, global_transaction_id, branch_qualifier):
"""Create a Transaction IDs (only global_transaction_id is used in pg)
format_id and branch_qualifier are not used in postgres
global_transaction_id may be any string identifier supported by
postgres returns a tuple
(format_id, global_transaction_id, branch_qualifier)"""
return (format_id, global_transaction_id, branch_qualifier)
    def tpc_begin(self, xid):
        """Begins a TPC transaction with the given transaction ID xid.
        This method should be called outside of a transaction (i.e. nothing may
        have executed since the last .commit() or .rollback()).
        Furthermore, it is an error to call .commit() or .rollback() within the
        TPC transaction. A ProgrammingError is raised, if the application calls
        .commit() or .rollback() during an active TPC transaction.
        This function is part of the `DBAPI 2.0 specification
        `_.
        """
        self._xid = xid
        # With autocommit on no transaction is open yet, so start one
        # explicitly; with autocommit off a transaction is already implied.
        if self.autocommit:
            self.execute(self._cursor, "begin transaction", None)
    def tpc_prepare(self):
        """Performs the first phase of a transaction started with .tpc_begin().
        A ProgrammingError is be raised if this method is called outside of a
        TPC transaction.
        After calling .tpc_prepare(), no statements can be executed until
        .tpc_commit() or .tpc_rollback() have been called.
        This function is part of the `DBAPI 2.0 specification
        `_.
        """
        # NOTE(review): the gid is interpolated directly into the SQL;
        # callers must supply trusted transaction identifiers.
        q = "PREPARE TRANSACTION '%s';" % (self._xid[1],)
        self.execute(self._cursor, q, None)
    def tpc_commit(self, xid=None):
        """When called with no arguments, .tpc_commit() commits a TPC
        transaction previously prepared with .tpc_prepare().
        If .tpc_commit() is called prior to .tpc_prepare(), a single phase
        commit is performed. A transaction manager may choose to do this if
        only a single resource is participating in the global transaction.
        When called with a transaction ID xid, the database commits the given
        transaction. If an invalid transaction ID is provided, a
        ProgrammingError will be raised. This form should be called outside of
        a transaction, and is intended for use in recovery.
        On return, the TPC transaction is ended.
        This function is part of the `DBAPI 2.0 specification
        `_.
        """
        if xid is None:
            xid = self._xid
        if xid is None:
            raise ProgrammingError(
                "Cannot tpc_commit() without a TPC transaction!")
        try:
            # Run outside any surrounding transaction; restore the
            # caller's autocommit mode afterwards.
            previous_autocommit_mode = self.autocommit
            self.autocommit = True
            if xid in self.tpc_recover():
                self.execute(
                    self._cursor, "COMMIT PREPARED '%s';" % (xid[1], ),
                    None)
            else:
                # a single-phase commit
                self.commit()
        finally:
            self.autocommit = previous_autocommit_mode
        self._xid = None
    def tpc_rollback(self, xid=None):
        """When called with no arguments, .tpc_rollback() rolls back a TPC
        transaction. It may be called before or after .tpc_prepare().
        When called with a transaction ID xid, it rolls back the given
        transaction. If an invalid transaction ID is provided, a
        ProgrammingError is raised. This form should be called outside of a
        transaction, and is intended for use in recovery.
        On return, the TPC transaction is ended.
        This function is part of the `DBAPI 2.0 specification
        `_.
        """
        if xid is None:
            xid = self._xid
        if xid is None:
            raise ProgrammingError(
                "Cannot tpc_rollback() without a TPC prepared transaction!")
        try:
            # Run outside any surrounding transaction; restore the
            # caller's autocommit mode afterwards.
            previous_autocommit_mode = self.autocommit
            self.autocommit = True
            if xid in self.tpc_recover():
                # a two-phase rollback
                self.execute(
                    self._cursor, "ROLLBACK PREPARED '%s';" % (xid[1],),
                    None)
            else:
                # a single-phase rollback
                self.rollback()
        finally:
            self.autocommit = previous_autocommit_mode
        self._xid = None
    def tpc_recover(self):
        """Returns a list of pending transaction IDs suitable for use with
        .tpc_commit(xid) or .tpc_rollback(xid).
        This function is part of the `DBAPI 2.0 specification
        `_.
        """
        try:
            # Query pg_prepared_xacts outside any transaction; restore
            # the caller's autocommit mode afterwards.
            previous_autocommit_mode = self.autocommit
            self.autocommit = True
            curs = self.cursor()
            curs.execute("select gid FROM pg_prepared_xacts")
            return [self.xid(0, row[0], '') for row in curs]
        finally:
            self.autocommit = previous_autocommit_mode
# pg element oid -> pg array typeoid
# Used by Connection.array_inspect to pick the array oid for a given
# element oid when sending list/tuple parameters.
pg_array_types = {
    16: 1000,  # BOOL[]
    25: 1009,  # TEXT[]
    701: 1022,  # FLOAT8[]
    1700: 1231,  # NUMERIC[]
}
# PostgreSQL encodings:
# http://www.postgresql.org/docs/8.3/interactive/multibyte.html
# Python encodings:
# http://www.python.org/doc/2.4/lib/standard-encodings.html
#
# Commented out encodings don't require a name change between PostgreSQL and
# Python. If the py side is None, then the encoding isn't supported.
#
# Every non-None value must be a name accepted by Python's codecs.lookup();
# the values are used directly as the connection's client encoding.
pg_to_py_encodings = {
    # Not supported:
    "mule_internal": None,
    "euc_tw": None,

    # Name fine as-is:
    # "euc_jp",
    # "euc_jis_2004",
    # "euc_kr",
    # "gb18030",
    # "gbk",
    # "johab",
    # "sjis",
    # "shift_jis_2004",
    # "uhc",
    # "utf8",

    # Different name:
    "euc_cn": "gb2312",
    # Bug fix: these four previously mapped to "is8859_*", which is not a
    # valid Python codec name.
    "iso_8859_5": "iso8859_5",
    "iso_8859_6": "iso8859_6",
    "iso_8859_7": "iso8859_7",
    "iso_8859_8": "iso8859_8",
    "koi8": "koi8_r",
    "latin1": "iso8859-1",
    "latin2": "iso8859_2",
    "latin3": "iso8859_3",
    "latin4": "iso8859_4",
    "latin5": "iso8859_9",
    "latin6": "iso8859_10",
    "latin7": "iso8859_13",
    "latin8": "iso8859_14",
    "latin9": "iso8859_15",
    "sql_ascii": "ascii",
    # Bug fix: was "cp886", which is not a Python codec; the DOS Cyrillic
    # code page is cp866.
    "win866": "cp866",
    "win874": "cp874",
    "win1250": "cp1250",
    "win1251": "cp1251",
    "win1252": "cp1252",
    "win1253": "cp1253",
    "win1254": "cp1254",
    "win1255": "cp1255",
    "win1256": "cp1256",
    "win1257": "cp1257",
    "win1258": "cp1258",
    "unicode": "utf-8",  # Needed for Amazon Redshift
}
def walk_array(arr):
    """Depth-first generator over the scalar slots of a nested list.

    Yields (containing_list, index, value) for every non-list element,
    so callers can both inspect and assign elements in place.
    """
    for idx, item in enumerate(arr):
        if not isinstance(item, list):
            yield arr, idx, item
        else:
            for inner in walk_array(item):
                yield inner
def array_find_first_element(arr):
    """Return the first non-None scalar in a nested list, or None.

    Search order is depth-first, matching the flattening order used
    elsewhere in this module.
    """
    for item in arr:
        if isinstance(item, list):
            found = array_find_first_element(item)
            if found is not None:
                return found
        elif item is not None:
            return item
    return None
def array_flatten(arr):
    """Yield every scalar of a nested list in depth-first order."""
    # Explicit stack of iterators instead of recursion; order of yielded
    # values is identical to the recursive formulation.
    stack = [iter(arr)]
    while stack:
        try:
            item = next(stack[-1])
        except StopIteration:
            stack.pop()
            continue
        if isinstance(item, list):
            stack.append(iter(item))
        else:
            yield item
def array_check_dimensions(arr):
    """Verify that a nested list is rectangular.

    Returns the list of inner-dimension lengths (empty for a
    one-dimensional array) and raises ArrayDimensionsNotConsistentError
    when sibling sub-arrays differ in length or nesting depth.
    """
    v0 = arr[0]
    if isinstance(v0, list):
        req_len = len(v0)
        req_inner_lengths = array_check_dimensions(v0)
        for v in arr:
            # Bug fix: a scalar sitting next to sub-lists previously
            # escaped as a TypeError from the recursive call instead of
            # raising the dedicated exception.
            if not isinstance(v, list):
                raise ArrayDimensionsNotConsistentError(
                    "array dimensions not consistent")
            inner_lengths = array_check_dimensions(v)
            if len(v) != req_len or inner_lengths != req_inner_lengths:
                raise ArrayDimensionsNotConsistentError(
                    "array dimensions not consistent")
        retval = [req_len]
        retval.extend(req_inner_lengths)
        return retval
    else:
        # make sure nothing else at this level is a list
        for v in arr:
            if isinstance(v, list):
                raise ArrayDimensionsNotConsistentError(
                    "array dimensions not consistent")
        return []
def array_has_null(arr):
    """Return True when any scalar anywhere in the nested list is None."""
    for item in arr:
        if item is None:
            return True
        if isinstance(item, list) and array_has_null(item):
            return True
    return False
def array_dim_lengths(arr):
    """Return the per-dimension lengths of a rectangular nested list.

    For example ``[[1, 2], [3, 4], [5, 6]]`` -> ``[3, 2]``.  Assumes
    *arr* has already been validated as rectangular (see
    array_check_dimensions), so only the first sub-array is inspected.
    """
    head = arr[0]
    if isinstance(head, list):
        # BUG FIX: the outer dimension's length is len(arr); the previous
        # code used len(head) — the *inner* length — which reported wrong
        # dimensions for any array whose outer and inner lengths differ,
        # and was inconsistent with the leaf branch below.
        return [len(arr)] + array_dim_lengths(head)
    return [len(arr)]
pg8000-1.10.2/pg8000/six.py 0000664 0001750 0001750 00000050751 12450024610 015374 0 ustar tlocke tlocke 0000000 0000000 """Utilities for writing code that runs on Python 2 and 3"""
# Copyright (c) 2010-2013 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
import operator
import sys
import types
# Package metadata for this vendored copy of six.
# NOTE(review): the author email appears to have been stripped during
# extraction — upstream six 1.4.1 includes an address here; verify.
__author__ = "Benjamin Peterson "
__version__ = "1.4.1"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
# True on Python 2.x before 2.6.
PRE_26 = PY2 and sys.version_info[1] < 6
# True when running under the Jython interpreter.
IS_JYTHON = sys.platform.lower().count('java') > 0
# Aliases for the basic types whose names differ between Python 2 and 3,
# plus MAXSIZE (largest container index supported by the interpreter).
if PY3:
    string_types = str,
    integer_types = int,
    class_types = type,
    text_type = str
    binary_type = bytes
    MAXSIZE = sys.maxsize
else:
    string_types = basestring, # noqa
    integer_types = (int, long) # noqa
    class_types = (type, types.ClassType)
    text_type = unicode # noqa
    binary_type = str
    if sys.platform.startswith("java"):
        # Jython always uses 32 bits.
        MAXSIZE = int((1 << 31) - 1)
    else:
        # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
        # Probe by asking len() for a value just over 2**31-1: a 32-bit
        # Py_ssize_t overflows, a 64-bit one does not.
        class X(object):
            def __len__(self):
                return 1 << 31
        try:
            len(X())
        except OverflowError:
            # 32-bit
            MAXSIZE = int((1 << 31) - 1)
        else:
            # 64-bit
            MAXSIZE = int((1 << 63) - 1)
        del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
    """Descriptor that resolves its value lazily on first access.

    Subclasses (MovedModule, MovedAttribute) provide _resolve(), which
    performs the actual import/lookup.
    """
    def __init__(self, name):
        # Attribute name under which the resolved object is installed.
        self.name = name
    def __get__(self, obj, tp):
        result = self._resolve()
        # Cache the resolved object directly on the instance, then remove
        # the descriptor from the class so later accesses hit the cache.
        setattr(obj, self.name, result)
        # This is a bit ugly, but it avoids running this again.
        delattr(tp, self.name)
        return result
class MovedModule(_LazyDescr):
    """Lazy reference to a module that was renamed between Python 2 and 3."""

    def __init__(self, name, old, new=None):
        super(MovedModule, self).__init__(name)
        # Python 3 uses the new module path (defaulting to *name* when no
        # explicit new path is given); Python 2 uses the old one.
        self.mod = (name if new is None else new) if PY3 else old

    def _resolve(self):
        return _import_module(self.mod)
class MovedAttribute(_LazyDescr):
    """Lazy reference to an attribute that moved between modules."""

    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
        super(MovedAttribute, self).__init__(name)
        if PY3:
            self.mod = name if new_mod is None else new_mod
            # Attribute lookup precedence: explicit new name, then the old
            # name, then the descriptor's own name.
            if new_attr is not None:
                self.attr = new_attr
            elif old_attr is not None:
                self.attr = old_attr
            else:
                self.attr = name
        else:
            self.mod = old_mod
            self.attr = name if old_attr is None else old_attr

    def _resolve(self):
        module = _import_module(self.mod)
        return getattr(module, self.attr)
# Container for all renamed/moved objects, registered in sys.modules as the
# virtual module "<pkg>.moves".  Each attribute is a _LazyDescr resolved on
# first use.
class _MovedItems(types.ModuleType):
    """Lazy loading of moved objects"""
_moved_attributes = [
    MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
    MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
    MovedAttribute(
        "filterfalse", "itertools", "itertools", "ifilterfalse",
        "filterfalse"),
    MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
    MovedAttribute("map", "itertools", "builtins", "imap", "map"),
    MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
    MovedAttribute("reduce", "__builtin__", "functools"),
    MovedAttribute("StringIO", "StringIO", "io"),
    MovedAttribute("UserString", "UserString", "collections"),
    MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
    MovedAttribute(
        "zip_longest", "itertools", "itertools", "izip_longest",
        "zip_longest"),
    MovedModule("builtins", "__builtin__"),
    MovedModule("configparser", "ConfigParser"),
    MovedModule("copyreg", "copy_reg"),
    MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
    MovedModule("http_cookies", "Cookie", "http.cookies"),
    MovedModule("html_entities", "htmlentitydefs", "html.entities"),
    MovedModule("html_parser", "HTMLParser", "html.parser"),
    MovedModule("http_client", "httplib", "http.client"),
    MovedModule(
        "email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
    MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
    MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
    MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
    MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
    MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
    MovedModule("cPickle", "cPickle", "pickle"),
    MovedModule("queue", "Queue"),
    MovedModule("reprlib", "repr"),
    MovedModule("socketserver", "SocketServer"),
    MovedModule("tkinter", "Tkinter"),
    MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
    MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
    MovedModule(
        "tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
    MovedModule(
        "tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
    MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
    MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
    MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
    MovedModule("tkinter_colorchooser", "tkColorChooser",
                "tkinter.colorchooser"),
    MovedModule("tkinter_commondialog", "tkCommonDialog",
                "tkinter.commondialog"),
    MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_font", "tkFont", "tkinter.font"),
    MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
    MovedModule(
        "tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"),
    MovedModule(
        "urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
    MovedModule(
        "urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
    MovedModule(
        "urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
    MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
    MovedModule("winreg", "_winreg"),
]
# Install each descriptor on the class, then register an instance in
# sys.modules so "import <pkg>.moves" works.
for attr in _moved_attributes:
    setattr(_MovedItems, attr.name, attr)
del attr
moves = sys.modules[__name__ + ".moves"] = _MovedItems(__name__ + ".moves")
# Virtual module exposing the names that moved to urllib.parse in Python 3;
# registered under both the underscore and dotted spellings.
class Module_six_moves_urllib_parse(types.ModuleType):
    """Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
    MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
    MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
    MovedAttribute("urljoin", "urlparse", "urllib.parse"),
    MovedAttribute("urlparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
    MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
    MovedAttribute("quote", "urllib", "urllib.parse"),
    MovedAttribute("quote_plus", "urllib", "urllib.parse"),
    MovedAttribute("unquote", "urllib", "urllib.parse"),
    MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
    MovedAttribute("urlencode", "urllib", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
    setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
sys.modules[__name__ + ".moves.urllib_parse"] = Module_six_moves_urllib_parse(
    __name__ + ".moves.urllib_parse")
sys.modules[__name__ + ".moves.urllib.parse"] = Module_six_moves_urllib_parse(
    __name__ + ".moves.urllib.parse")
# Virtual module for the exception names that moved to urllib.error.
class Module_six_moves_urllib_error(types.ModuleType):
    """Lazy loading of moved objects in six.moves.urllib_error"""
_urllib_error_moved_attributes = [
    MovedAttribute("URLError", "urllib2", "urllib.error"),
    MovedAttribute("HTTPError", "urllib2", "urllib.error"),
    MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
    setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
sys.modules[__name__ + ".moves.urllib_error"] = Module_six_moves_urllib_error(
    __name__ + ".moves.urllib_error")
sys.modules[__name__ + ".moves.urllib.error"] = Module_six_moves_urllib_error(
    __name__ + ".moves.urllib.error")
# Virtual module for the names that moved to urllib.request.
class Module_six_moves_urllib_request(types.ModuleType):
    """Lazy loading of moved objects in six.moves.urllib_request"""
_urllib_request_moved_attributes = [
    MovedAttribute("urlopen", "urllib2", "urllib.request"),
    MovedAttribute("install_opener", "urllib2", "urllib.request"),
    MovedAttribute("build_opener", "urllib2", "urllib.request"),
    MovedAttribute("pathname2url", "urllib", "urllib.request"),
    MovedAttribute("url2pathname", "urllib", "urllib.request"),
    MovedAttribute("getproxies", "urllib", "urllib.request"),
    MovedAttribute("Request", "urllib2", "urllib.request"),
    MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
    MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
    MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
    MovedAttribute(
        "HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
    MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
    MovedAttribute("FileHandler", "urllib2", "urllib.request"),
    MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
    MovedAttribute("urlretrieve", "urllib", "urllib.request"),
    MovedAttribute("urlcleanup", "urllib", "urllib.request"),
    MovedAttribute("URLopener", "urllib", "urllib.request"),
    MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
    setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
sys.modules[__name__ + ".moves.urllib_request"] = \
    Module_six_moves_urllib_request(__name__ + ".moves.urllib_request")
sys.modules[__name__ + ".moves.urllib.request"] = \
    Module_six_moves_urllib_request(__name__ + ".moves.urllib.request")
# Virtual module for the names that moved to urllib.response.
class Module_six_moves_urllib_response(types.ModuleType):
    """Lazy loading of moved objects in six.moves.urllib_response"""
_urllib_response_moved_attributes = [
    MovedAttribute("addbase", "urllib", "urllib.response"),
    MovedAttribute("addclosehook", "urllib", "urllib.response"),
    MovedAttribute("addinfo", "urllib", "urllib.response"),
    MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
    setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
sys.modules[__name__ + ".moves.urllib_response"] = \
    Module_six_moves_urllib_response(__name__ + ".moves.urllib_response")
sys.modules[__name__ + ".moves.urllib.response"] = \
    Module_six_moves_urllib_response(__name__ + ".moves.urllib.response")
# Virtual module for robotparser, renamed urllib.robotparser in Python 3.
class Module_six_moves_urllib_robotparser(types.ModuleType):
    """Lazy loading of moved objects in six.moves.urllib_robotparser"""
_urllib_robotparser_moved_attributes = [
    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
sys.modules[__name__ + ".moves.urllib_robotparser"] = \
    Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib_robotparser")
sys.modules[__name__ + ".moves.urllib.robotparser"] = \
    Module_six_moves_urllib_robotparser(
        __name__ + ".moves.urllib.robotparser")
# Umbrella module that groups the five urllib sub-namespaces registered
# above so that "six.moves.urllib.parse" etc. mirror Python 3's layout.
class Module_six_moves_urllib(types.ModuleType):
    """Create a six.moves.urllib namespace that resembles the Python 3
    namespace"""
    parse = sys.modules[__name__ + ".moves.urllib_parse"]
    error = sys.modules[__name__ + ".moves.urllib_error"]
    request = sys.modules[__name__ + ".moves.urllib_request"]
    response = sys.modules[__name__ + ".moves.urllib_response"]
    robotparser = sys.modules[__name__ + ".moves.urllib_robotparser"]
sys.modules[__name__ + ".moves.urllib"] = Module_six_moves_urllib(
    __name__ + ".moves.urllib")
def add_move(move):
    """Register *move* (a MovedModule/MovedAttribute) on six.moves."""
    setattr(_MovedItems, move.name, move)
def remove_move(name):
    """Remove a previously registered item from six.moves."""
    # The item may live either on the _MovedItems class (registered
    # descriptors) or directly in the moves instance __dict__ (resolved
    # values cached by _LazyDescr.__get__).
    if name in vars(_MovedItems):
        delattr(_MovedItems, name)
    elif name in moves.__dict__:
        del moves.__dict__[name]
    else:
        raise AttributeError("no such move, %r" % (name,))
# Version-dependent attribute names used below for function/method
# introspection (via operator.attrgetter) and dictionary iteration
# (via getattr in iterkeys/itervalues/iteritems/iterlists).
if PY3:
    _meth_func = "__func__"
    _meth_self = "__self__"
    _func_closure = "__closure__"
    _func_code = "__code__"
    _func_defaults = "__defaults__"
    _func_globals = "__globals__"
    _iterkeys = "keys"
    _itervalues = "values"
    _iteritems = "items"
    _iterlists = "lists"
else:
    _meth_func = "im_func"
    _meth_self = "im_self"
    _func_closure = "func_closure"
    _func_code = "func_code"
    _func_defaults = "func_defaults"
    _func_globals = "func_globals"
    _iterkeys = "iterkeys"
    _itervalues = "itervalues"
    _iteritems = "iteritems"
    _iterlists = "iterlists"
# next() builtin: present since Python 2.6; fall back to calling the
# iterator's .next() method on older interpreters.
try:
    advance_iterator = next
except NameError:
    def advance_iterator(it):
        return it.next()
next = advance_iterator
# callable() was removed in Python 3.0/3.1; emulate it via the MRO when
# the builtin is missing.
try:
    callable = callable
except NameError:
    def callable(obj):
        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
# Helpers papering over the Python 2 bound/unbound method distinction,
# which no longer exists in Python 3.
if PY3:
    def get_unbound_function(unbound):
        return unbound
    create_bound_method = types.MethodType
    Iterator = object
else:
    def get_unbound_function(unbound):
        return unbound.im_func
    def create_bound_method(func, obj):
        return types.MethodType(func, obj, obj.__class__)
    # Base class giving Python 2 classes a .next() that delegates to
    # __next__, so iterator subclasses only define __next__.
    class Iterator(object):
        def next(self):
            return type(self).__next__(self)
    callable = callable
_add_doc(get_unbound_function,
         """Get the function out of a possibly unbound function""")
# Accessors for function/method internals using the version-specific
# attribute names computed above.
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
# Dictionary iteration helpers: dispatch to d.iterkeys()/d.keys() etc.
# depending on the Python major version (see _iterkeys and friends above).
# **kw is passed through for dict-like objects whose methods take options.
def iterkeys(d, **kw):
    """Return an iterator over the keys of a dictionary."""
    return iter(getattr(d, _iterkeys)(**kw))
def itervalues(d, **kw):
    """Return an iterator over the values of a dictionary."""
    return iter(getattr(d, _itervalues)(**kw))
def iteritems(d, **kw):
    """Return an iterator over the (key, value) pairs of a dictionary."""
    return iter(getattr(d, _iteritems)(**kw))
def iterlists(d, **kw):
    """Return an iterator over the (key, [values]) pairs of a dictionary."""
    return iter(getattr(d, _iterlists)(**kw))
# Byte/text literal helpers and byte-indexing shims: b()/u() emulate the
# b"" / u"" literals, and byte2int/indexbytes/iterbytes hide the fact that
# indexing bytes yields ints on Python 3 but 1-char strings on Python 2.
if PY3:
    def b(s):
        return s.encode("latin-1")
    def u(s):
        return s
    unichr = chr
    if sys.version_info[1] <= 1:
        def int2byte(i):
            return bytes((i,))
    else:
        # This is about 2x faster than the implementation above on 3.2+
        int2byte = operator.methodcaller("to_bytes", 1, "big")
    byte2int = operator.itemgetter(0)
    indexbytes = operator.getitem
    iterbytes = iter
    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO
else:
    def b(s):
        return s
    def u(s):
        return unicode(s, "unicode_escape") # noqa
    unichr = unichr
    int2byte = chr
    def byte2int(bs):
        return ord(bs[0])
    def indexbytes(buf, i):
        return ord(buf[i])
    def iterbytes(buf):
        return (ord(byte) for byte in buf)
    import StringIO
    StringIO = BytesIO = StringIO.StringIO
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
# Compatibility wrappers for exec, exception re-raising, and print.
if PY3:
    import builtins
    exec_ = getattr(builtins, "exec")
    def reraise(tp, value, tb=None):
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value
    print_ = getattr(builtins, "print")
    del builtins
else:
    def exec_(_code_, _globs_=None, _locs_=None):
        """Execute code in a namespace."""
        if _globs_ is None:
            # Default to the caller's globals/locals.
            frame = sys._getframe(1)
            _globs_ = frame.f_globals
            if _locs_ is None:
                _locs_ = frame.f_locals
            del frame
        elif _locs_ is None:
            _locs_ = _globs_
        exec("""exec _code_ in _globs_, _locs_""")
    # The Python 2 three-argument raise syntax is a SyntaxError under
    # Python 3, so it must be hidden inside an exec_ string.
    exec_("""def reraise(tp, value, tb=None):
    raise tp, value, tb
""")
    def print_(*args, **kwargs):
        """The new-style print function."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return
        def write(data):
            if not isinstance(data, basestring): # noqa
                data = str(data)
            fp.write(data)
        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode): # noqa
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode): # noqa
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        if not want_unicode:
            for arg in args:
                if isinstance(arg, unicode): # noqa
                    want_unicode = True
                    break
        # If any piece is unicode, the separator and newline must be too,
        # to avoid implicit-coercion UnicodeDecodeErrors when writing.
        if want_unicode:
            newline = unicode("\n") # noqa
            space = unicode(" ") # noqa
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)
        write(end)
_add_doc(reraise, """Reraise an exception.""")
def with_metaclass(meta, *bases):
    """Create a base class with a metaclass.

    Returns a temporary class named "NewBase" built by *meta*; classes
    deriving from it are themselves constructed by *meta*, which works the
    same under Python 2 and Python 3 metaclass syntax.
    """
    temporary_name = "NewBase"
    return meta(temporary_name, bases, {})
def add_metaclass(metaclass):
    """Class decorator that rebuilds the decorated class using *metaclass*."""
    def wrapper(cls):
        body = dict(cls.__dict__)
        # __dict__ and __weakref__ belong to the old class object and are
        # recreated automatically by the metaclass call.
        body.pop('__dict__', None)
        body.pop('__weakref__', None)
        # Slot descriptors likewise get recreated from __slots__.
        for slot in body.get('__slots__', ()):
            body.pop(slot)
        return metaclass(cls.__name__, cls.__bases__, body)
    return wrapper
pg8000-1.10.2/versioneer.py 0000664 0001750 0001750 00000107262 12452005063 016037 0 ustar tlocke tlocke 0000000 0000000
# Version: 0.12
"""
The Versioneer
==============
* like a rocketeer, but for versions!
* https://github.com/warner/python-versioneer
* Brian Warner
* License: Public Domain
* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, and pypy
[![Build Status](https://travis-ci.org/warner/python-versioneer.png?branch=master)](https://travis-ci.org/warner/python-versioneer)
This is a tool for managing a recorded version number in distutils-based
python projects. The goal is to remove the tedious and error-prone "update
the embedded version string" step from your release process. Making a new
release should be as easy as recording a new tag in your version-control
system, and maybe making new tarballs.
## Quick Install
* `pip install versioneer` to somewhere to your $PATH
* run `versioneer-installer` in your source tree: this installs `versioneer.py`
* follow the instructions below (also in the `versioneer.py` docstring)
## Version Identifiers
Source trees come from a variety of places:
* a version-control system checkout (mostly used by developers)
* a nightly tarball, produced by build automation
* a snapshot tarball, produced by a web-based VCS browser, like github's
"tarball from tag" feature
* a release tarball, produced by "setup.py sdist", distributed through PyPI
Within each source tree, the version identifier (either a string or a number,
this tool is format-agnostic) can come from a variety of places:
* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows
about recent "tags" and an absolute revision-id
* the name of the directory into which the tarball was unpacked
* an expanded VCS keyword ($Id$, etc)
* a `_version.py` created by some earlier build step
For released software, the version identifier is closely related to a VCS
tag. Some projects use tag names that include more than just the version
string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool
needs to strip the tag prefix to extract the version identifier. For
unreleased software (between tags), the version identifier should provide
enough information to help developers recreate the same tree, while also
giving them an idea of roughly how old the tree is (after version 1.2, before
version 1.3). Many VCS systems can report a description that captures this,
for example 'git describe --tags --dirty --always' reports things like
"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
uncommitted changes).
The version identifier is used for multiple purposes:
* to allow the module to self-identify its version: `myproject.__version__`
* to choose a name and prefix for a 'setup.py sdist' tarball
## Theory of Operation
Versioneer works by adding a special `_version.py` file into your source
tree, where your `__init__.py` can import it. This `_version.py` knows how to
dynamically ask the VCS tool for version information at import time. However,
when you use "setup.py build" or "setup.py sdist", `_version.py` in the new
copy is replaced by a small static file that contains just the generated
version data.
`_version.py` also contains `$Revision$` markers, and the installation
process marks `_version.py` to have this marker rewritten with a tag name
during the "git archive" command. As a result, generated tarballs will
contain enough information to get the proper version.
## Installation
First, decide on values for the following configuration variables:
* `VCS`: the version control system you use. Currently accepts "git".
* `versionfile_source`:
A project-relative pathname into which the generated version strings should
be written. This is usually a `_version.py` next to your project's main
`__init__.py` file, so it can be imported at runtime. If your project uses
`src/myproject/__init__.py`, this should be `src/myproject/_version.py`.
This file should be checked in to your VCS as usual: the copy created below
by `setup.py versioneer` will include code that parses expanded VCS
keywords in generated tarballs. The 'build' and 'sdist' commands will
replace it with a copy that has just the calculated version string.
This must be set even if your project does not have any modules (and will
therefore never import `_version.py`), since "setup.py sdist" -based trees
still need somewhere to record the pre-calculated version strings. Anywhere
in the source tree should do. If there is a `__init__.py` next to your
`_version.py`, the `setup.py versioneer` command (described below) will
append some `__version__`-setting assignments, if they aren't already
present.
* `versionfile_build`:
Like `versionfile_source`, but relative to the build directory instead of
the source directory. These will differ when your setup.py uses
'package_dir='. If you have `package_dir={'myproject': 'src/myproject'}`,
then you will probably have `versionfile_build='myproject/_version.py'` and
`versionfile_source='src/myproject/_version.py'`.
If this is set to None, then `setup.py build` will not attempt to rewrite
any `_version.py` in the built tree. If your project does not have any
libraries (e.g. if it only builds a script), then you should use
`versionfile_build = None` and override `distutils.command.build_scripts`
to explicitly insert a copy of `versioneer.get_version()` into your
generated script.
* `tag_prefix`:
a string, like 'PROJECTNAME-', which appears at the start of all VCS tags.
If your tags look like 'myproject-1.2.0', then you should use
tag_prefix='myproject-'. If you use unprefixed tags like '1.2.0', this
should be an empty string.
* `parentdir_prefix`:
a string, frequently the same as tag_prefix, which appears at the start of
all unpacked tarball filenames. If your tarball unpacks into
'myproject-1.2.0', this should be 'myproject-'.
This tool provides one script, named `versioneer-installer`. That script does
one thing: write a copy of `versioneer.py` into the current directory.
To versioneer-enable your project:
* 1: Run `versioneer-installer` to copy `versioneer.py` into the top of your
source tree.
* 2: add the following lines to the top of your `setup.py`, with the
configuration values you decided earlier:
import versioneer
versioneer.VCS = 'git'
versioneer.versionfile_source = 'src/myproject/_version.py'
versioneer.versionfile_build = 'myproject/_version.py'
versioneer.tag_prefix = '' # tags are like 1.2.0
versioneer.parentdir_prefix = 'myproject-' # dirname like 'myproject-1.2.0'
* 3: add the following arguments to the setup() call in your setup.py:
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
* 4: now run `setup.py versioneer`, which will create `_version.py`, and will
modify your `__init__.py` (if one exists next to `_version.py`) to define
`__version__` (by calling a function from `_version.py`). It will also
modify your `MANIFEST.in` to include both `versioneer.py` and the generated
`_version.py` in sdist tarballs.
* 5: commit these changes to your VCS. To make sure you won't forget,
`setup.py versioneer` will mark everything it touched for addition.
## Post-Installation Usage
Once established, all uses of your tree from a VCS checkout should get the
current version string. All generated tarballs should include an embedded
version string (so users who unpack them will not need a VCS tool installed).
If you distribute your project through PyPI, then the release process should
boil down to two steps:
* 1: git tag 1.0
* 2: python setup.py register sdist upload
If you distribute it through github (i.e. users use github to generate
tarballs with `git archive`), the process is:
* 1: git tag 1.0
* 2: git push; git push --tags
Currently, all version strings must be based upon a tag. Versioneer will
report "unknown" until your tree has at least one tag in its history. This
restriction will be fixed eventually (see issue #12).
## Version-String Flavors
Code which uses Versioneer can learn about its version string at runtime by
importing `_version` from your main `__init__.py` file and running the
`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can
import the top-level `versioneer.py` and run `get_versions()`.
Both functions return a dictionary with different keys for different flavors
of the version string:
* `['version']`: condensed tag+distance+shortid+dirty identifier. For git,
this uses the output of `git describe --tags --dirty --always` but strips
the tag_prefix. For example "0.11-2-g1076c97-dirty" indicates that the tree
is like the "1076c97" commit but has uncommitted changes ("-dirty"), and
that this commit is two revisions ("-2-") beyond the "0.11" tag. For
released software (exactly equal to a known tag), the identifier will only
contain the stripped tag, e.g. "0.11".
* `['full']`: detailed revision identifier. For Git, this is the full SHA1
commit id, followed by "-dirty" if the tree contains uncommitted changes,
e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac-dirty".
Some variants are more useful than others. Including `full` in a bug report
should allow developers to reconstruct the exact code being tested (or
indicate the presence of local changes that should be shared with the
developers). `version` is suitable for display in an "about" box or a CLI
`--version` output: it can be easily compared against release notes and lists
of bugs fixed in various releases.
In the future, this will also include a
[PEP-0440](http://legacy.python.org/dev/peps/pep-0440/) -compatible flavor
(e.g. `1.2.post0.dev123`). This loses a lot of information (and has no room
for a hash-based revision id), but is safe to use in a `setup.py`
"`version=`" argument. It also enables tools like *pip* to compare version
strings and evaluate compatibility constraint declarations.
The `setup.py versioneer` command adds the following text to your
`__init__.py` to place a basic version in `YOURPROJECT.__version__`:
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
## Updating Versioneer
To upgrade your project to a new release of Versioneer, do the following:
* install the new Versioneer (`pip install -U versioneer` or equivalent)
* re-run `versioneer-installer` in your source tree to replace your copy of
`versioneer.py`
* edit `setup.py`, if necessary, to include any new configuration settings
indicated by the release notes
* re-run `setup.py versioneer` to replace `SRC/_version.py`
* commit any changed files
### Upgrading from 0.10 to 0.11
You must add a `versioneer.VCS = "git"` to your `setup.py` before re-running
`setup.py versioneer`. This will enable the use of additional version-control
systems (SVN, etc) in the future.
### Upgrading from 0.11 to 0.12
Nothing special.
## Future Directions
This tool is designed to make it easily extended to other version-control
systems: all VCS-specific components are in separate directories like
src/git/ . The top-level `versioneer.py` script is assembled from these
components by running make-versioneer.py . In the future, make-versioneer.py
will take a VCS name as an argument, and will construct a version of
`versioneer.py` that is specific to the given VCS. It might also take the
configuration arguments that are currently provided manually during
installation by editing setup.py . Alternatively, it might go the other
direction and include code from all supported VCS systems, reducing the
number of intermediate scripts.
## License
To make Versioneer easier to embed, all its code is hereby released into the
public domain. The `_version.py` that it creates is also in the public
domain.
"""
import os, sys, re, subprocess, errno
from distutils.core import Command
from distutils.command.sdist import sdist as _sdist
from distutils.command.build import build as _build
# these configuration settings will be overridden by setup.py after it
# imports us
# Project-relative path of the generated _version.py (checked into VCS).
versionfile_source = None
# Same file, but relative to the build directory (None disables rewriting).
versionfile_build = None
# Prefix that VCS tags carry before the version string (e.g. "myproject-").
tag_prefix = None
# Prefix of unpacked source-tarball directory names.
parentdir_prefix = None
# Name of the version control system in use; currently only "git".
VCS = None
# these dictionaries contain VCS-specific tools
LONG_VERSION_PY = {}
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
    """Run the first spawnable command from *commands* with *args*.

    Each candidate name in *commands* is tried in order (this lets callers
    pass e.g. ["git", "git.cmd"] so Windows works with shell=False).
    Returns the command's stripped stdout as a string, or None when no
    candidate could be started or the command exited non-zero.
    """
    assert isinstance(commands, list)
    p = None
    for c in commands:
        try:
            # remember shell=False, so use git.cmd on windows, not just git
            p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,
                                 stderr=(subprocess.PIPE if hide_stderr
                                         else None))
            break
        except EnvironmentError:
            e = sys.exc_info()[1]
            if e.errno == errno.ENOENT:
                # This candidate doesn't exist; try the next one.
                continue
            if verbose:
                print("unable to run %s" % args[0])
                print(e)
            return None
    else:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None
    stdout = p.communicate()[0].strip()
    # BUG FIX: compare the version tuple, not the version string —
    # "sys.version >= '3'" is a lexicographic comparison that would break
    # on Python 10+ and is fragile in general.
    if sys.version_info[0] >= 3:
        stdout = stdout.decode()
    if p.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % args[0])
        return None
    return stdout
LONG_VERSION_PY['git'] = '''
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.12 (https://github.com/warner/python-versioneer)
# these strings will be replaced by git during git-archive
git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
# these strings are filled in when 'setup.py versioneer' creates _version.py
tag_prefix = "%(TAG_PREFIX)s"
parentdir_prefix = "%(PARENTDIR_PREFIX)s"
versionfile_source = "%(VERSIONFILE_SOURCE)s"
import os, sys, re, subprocess, errno
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
assert isinstance(commands, list)
p = None
for c in commands:
try:
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %%s" %% args[0])
print(e)
return None
else:
if verbose:
print("unable to find command, tried %%s" %% (commands,))
return None
stdout = p.communicate()[0].strip()
if sys.version >= '3':
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %%s (error)" %% args[0])
return None
return stdout
def versions_from_parentdir(parentdir_prefix, root, verbose=False):
# Source tarballs conventionally unpack into a directory that includes
# both the project name and a version string.
dirname = os.path.basename(root)
if not dirname.startswith(parentdir_prefix):
if verbose:
print("guessing rootdir is '%%s', but '%%s' doesn't start with prefix '%%s'" %%
(root, dirname, parentdir_prefix))
return None
return {"version": dirname[len(parentdir_prefix):], "full": ""}
def git_get_keywords(versionfile_abs):
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs,"r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
def git_versions_from_keywords(keywords, tag_prefix, verbose=False):
if not keywords:
return {} # keyword-finding function failed to find keywords
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
return {} # unexpanded, so not in an unpacked git-archive tarball
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %%d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%%s', no digits" %% ",".join(refs-tags))
if verbose:
print("likely tags: %%s" %% ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %%s" %% r)
return { "version": r,
"full": keywords["full"].strip() }
# no suitable tags, so we use the full revision id
if verbose:
print("no suitable tags, using full revision id")
return { "version": keywords["full"].strip(),
"full": keywords["full"].strip() }
def git_versions_from_vcs(tag_prefix, root, verbose=False):
# this runs 'git' from the root of the source tree. This only gets called
# if the git-archive 'subst' keywords were *not* expanded, and
# _version.py hasn't already been rewritten with a short version string,
# meaning we're inside a checked out source tree.
if not os.path.exists(os.path.join(root, ".git")):
if verbose:
print("no .git in %%s" %% root)
return {}
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
stdout = run_command(GITS, ["describe", "--tags", "--dirty", "--always"],
cwd=root)
if stdout is None:
return {}
if not stdout.startswith(tag_prefix):
if verbose:
print("tag '%%s' doesn't start with prefix '%%s'" %% (stdout, tag_prefix))
return {}
tag = stdout[len(tag_prefix):]
stdout = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if stdout is None:
return {}
full = stdout.strip()
if tag.endswith("-dirty"):
full += "-dirty"
return {"version": tag, "full": full}
def get_versions(default={"version": "unknown", "full": ""}, verbose=False):
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
keywords = { "refnames": git_refnames, "full": git_full }
ver = git_versions_from_keywords(keywords, tag_prefix, verbose)
if ver:
return ver
try:
root = os.path.abspath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in range(len(versionfile_source.split(os.sep))):
root = os.path.dirname(root)
except NameError:
return default
return (git_versions_from_vcs(tag_prefix, root, verbose)
or versions_from_parentdir(parentdir_prefix, root, verbose)
or default)
'''
def git_get_keywords(versionfile_abs):
    """Extract the git_refnames/git_full keyword values from a _version.py.

    The code embedded in _version.py can just fetch the value of these
    keywords. When used from setup.py, we don't want to import
    _version.py, so we scan the file with a regexp instead. This
    function is not used from _version.py.

    Returns a dict with "refnames"/"full" keys for whatever keyword
    lines were found; returns {} if the file can't be read.
    """
    keywords = {}
    try:
        # 'with' guarantees the handle is closed even if a read raises;
        # the original only closed on the fully-successful path
        with open(versionfile_abs, "r") as f:
            for line in f.readlines():
                if line.strip().startswith("git_refnames ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["refnames"] = mo.group(1)
                if line.strip().startswith("git_full ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["full"] = mo.group(1)
    except EnvironmentError:
        # unreadable/missing file: report no keywords rather than crash
        pass
    return keywords
def git_versions_from_keywords(keywords, tag_prefix, verbose=False):
    """Derive {"version", "full"} from expanded git-archive keywords.

    Returns {} when the keywords are missing or still unexpanded.
    """
    if not keywords:
        return {}  # keyword-finding function failed to find keywords
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        # still the literal substitution pattern, so this is not an
        # unpacked git-archive tarball: the keywords tell us nothing
        if verbose:
            print("keywords are unexpanded, not using")
        return {}
    refs = set(name.strip() for name in refnames.strip("()").split(","))
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = set(name[len(TAG):] for name in refs if name.startswith(TAG))
    if not tags:
        # Either git < 1.8.3 or there really are no tags. Heuristic:
        # assume every version tag contains a digit, which filters out
        # common branch names ("release", "stabilization") plus "HEAD"
        # and "master" that the old %d expansion mixes in.
        tags = set(name for name in refs if re.search(r'\d', name))
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs-tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    full = keywords["full"].strip()
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if not ref.startswith(tag_prefix):
            continue
        version = ref[len(tag_prefix):]
        if verbose:
            print("picking %s" % version)
        return {"version": version, "full": full}
    # no suitable tags, so we use the full revision id
    if verbose:
        print("no suitable tags, using full revision id")
    return {"version": full, "full": full}
def git_versions_from_vcs(tag_prefix, root, verbose=False):
    """Ask git for version info from a checked-out source tree at *root*.

    This only gets called if the git-archive 'subst' keywords were *not*
    expanded and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a real checkout. Returns {} on
    any failure.
    """
    if not os.path.exists(os.path.join(root, ".git")):
        if verbose:
            print("no .git in %s" % root)
        return {}
    # shell=False elsewhere means we must name git.cmd/git.exe on Windows
    GITS = ["git.cmd", "git.exe"] if sys.platform == "win32" else ["git"]
    stdout = run_command(GITS, ["describe", "--tags", "--dirty", "--always"],
                         cwd=root)
    if stdout is None:
        return {}
    if not stdout.startswith(tag_prefix):
        if verbose:
            print("tag '%s' doesn't start with prefix '%s'" % (stdout, tag_prefix))
        return {}
    tag = stdout[len(tag_prefix):]
    stdout = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if stdout is None:
        return {}
    full = stdout.strip()
    if tag.endswith("-dirty"):
        # propagate 'git describe's dirty marker to the full revision id
        full += "-dirty"
    return {"version": tag, "full": full}
def do_vcs_install(manifest_in, versionfile_source, ipy):
    """Stage versioneer's files in git and ensure .gitattributes marks
    versionfile_source with export-subst (so 'git archive' tarballs get
    their keywords expanded)."""
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
    files = [manifest_in, versionfile_source]
    if ipy:
        files.append(ipy)
    try:
        # also stage this versioneer.py itself; resolve a .pyc/.pyo
        # __file__ back to the .py source
        me = __file__
        if me.endswith(".pyc") or me.endswith(".pyo"):
            me = os.path.splitext(me)[0] + ".py"
        versioneer_file = os.path.relpath(me)
    except NameError:
        # frozen/embedded interpreters may not define __file__
        versioneer_file = "versioneer.py"
    files.append(versioneer_file)
    present = False
    try:
        # check whether .gitattributes already carries the export-subst line
        f = open(".gitattributes", "r")
        for line in f.readlines():
            if line.strip().startswith(versionfile_source):
                if "export-subst" in line.strip().split()[1:]:
                    present = True
        f.close()
    except EnvironmentError:
        # no .gitattributes yet: treat as "not present"
        pass
    if not present:
        f = open(".gitattributes", "a+")
        f.write("%s export-subst\n" % versionfile_source)
        f.close()
        files.append(".gitattributes")
    run_command(GITS, ["add", "--"] + files)
def versions_from_parentdir(parentdir_prefix, root, verbose=False):
    """Infer the version from the directory name of *root*.

    Source tarballs conventionally unpack into "<project>-<version>";
    strip *parentdir_prefix* from the basename and treat the remainder
    as the version. Returns None when the prefix doesn't match.
    """
    dirname = os.path.basename(root)
    if dirname.startswith(parentdir_prefix):
        return {"version": dirname[len(parentdir_prefix):], "full": ""}
    if verbose:
        print("guessing rootdir is '%s', but '%s' doesn't start with prefix '%s'" %
              (root, dirname, parentdir_prefix))
    return None
SHORT_VERSION_PY = """
# This file was generated by 'versioneer.py' (0.12) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.
version_version = '%(version)s'
version_full = '%(full)s'
def get_versions(default={}, verbose=False):
return {'version': version_version, 'full': version_full}
"""
DEFAULT = {"version": "unknown", "full": "unknown"}
def versions_from_file(filename):
    """Parse version_version/version_full assignments from *filename*.

    Reads a file written from SHORT_VERSION_PY. Returns {} when the
    file can't be opened; otherwise whatever subset of {"version",
    "full"} was found.
    """
    patterns = (("version", "version_version = '([^']+)'"),
                ("full", "version_full = '([^']+)'"))
    found = {}
    try:
        with open(filename) as f:
            for line in f.readlines():
                for key, pattern in patterns:
                    mo = re.match(pattern, line)
                    if mo:
                        found[key] = mo.group(1)
    except EnvironmentError:
        return {}
    return found
def write_to_version_file(filename, versions):
    """Overwrite *filename* with SHORT_VERSION_PY rendered from *versions*."""
    rendered = SHORT_VERSION_PY % versions
    with open(filename, "w") as f:
        f.write(rendered)
    print("set %s to '%s'" % (filename, versions["version"]))
def get_root():
    """Return the directory holding this versioneer.py (falling back to
    the directory of the running script when __file__ is undefined)."""
    try:
        here = __file__
    except NameError:
        # frozen/embedded interpreters: assume sys.argv[0] is setup.py
        here = sys.argv[0]
    return os.path.dirname(os.path.abspath(here))
def vcs_function(vcs, suffix):
    """Look up the module-level function named '<vcs>_<suffix>', or None."""
    wanted = '%s_%s' % (vcs, suffix)
    return getattr(sys.modules[__name__], wanted, None)
def get_versions(default=DEFAULT, verbose=False):
    """Compute the project version, trying sources from most to least
    reliable: expanded VCS keywords, a static _version.py, a live VCS
    query, the parent directory name, then *default*.
    """
    # returns dict with two keys: 'version' and 'full'
    assert versionfile_source is not None, "please set versioneer.versionfile_source"
    assert tag_prefix is not None, "please set versioneer.tag_prefix"
    assert parentdir_prefix is not None, "please set versioneer.parentdir_prefix"
    assert VCS is not None, "please set versioneer.VCS"
    # I am in versioneer.py, which must live at the top of the source tree,
    # which we use to compute the root directory. py2exe/bbfreeze/non-CPython
    # don't have __file__, in which case we fall back to sys.argv[0] (which
    # ought to be the setup.py script). We prefer __file__ since that's more
    # robust in cases where setup.py was invoked in some weird way (e.g. pip)
    root = get_root()
    versionfile_abs = os.path.join(root, versionfile_source)
    # extract version from first of _version.py, VCS command (e.g. 'git
    # describe'), parentdir. This is meant to work for developers using a
    # source checkout, for users of a tarball created by 'setup.py sdist',
    # and for users of a tarball/zipball created by 'git archive' or github's
    # download-from-tag feature or the equivalent in other VCSes.
    get_keywords_f = vcs_function(VCS, "get_keywords")
    versions_from_keywords_f = vcs_function(VCS, "versions_from_keywords")
    if get_keywords_f and versions_from_keywords_f:
        vcs_keywords = get_keywords_f(versionfile_abs)
        ver = versions_from_keywords_f(vcs_keywords, tag_prefix)
        if ver:
            if verbose: print("got version from expanded keyword %s" % ver)
            return ver
    ver = versions_from_file(versionfile_abs)
    if ver:
        if verbose: print("got version from file %s %s" % (versionfile_abs,ver))
        return ver
    versions_from_vcs_f = vcs_function(VCS, "versions_from_vcs")
    if versions_from_vcs_f:
        ver = versions_from_vcs_f(tag_prefix, root, verbose)
        if ver:
            if verbose: print("got version from VCS %s" % ver)
            return ver
    ver = versions_from_parentdir(parentdir_prefix, root, verbose)
    if ver:
        if verbose: print("got version from parentdir %s" % ver)
        return ver
    if verbose: print("got version from default %s" % default)
    return default
def get_version(verbose=False):
    """Convenience wrapper: just the 'version' field of get_versions()."""
    versions = get_versions(verbose=verbose)
    return versions["version"]
class cmd_version(Command):
    """Distutils command that prints the version versioneer computes."""

    description = "report generated version string"
    user_options = []
    boolean_options = []

    def initialize_options(self):
        # nothing to configure
        pass

    def finalize_options(self):
        # nothing to validate
        pass

    def run(self):
        computed = get_version(verbose=True)
        print("Version is currently: %s" % computed)
class cmd_build(_build):
    """Standard 'build', plus: rewrite the built copy of _version.py with
    a static short version so installed packages don't need the VCS."""
    def run(self):
        versions = get_versions(verbose=True)
        _build.run(self)
        # now locate _version.py in the new build/ directory and replace it
        # with an updated value
        if versionfile_build:
            target_versionfile = os.path.join(self.build_lib, versionfile_build)
            print("UPDATING %s" % target_versionfile)
            # unlink first in case it's a hardlink back into the source tree
            os.unlink(target_versionfile)
            with open(target_versionfile, "w") as f:
                f.write(SHORT_VERSION_PY % versions)
# cx_Freeze drives freezing through 'build_exe' rather than 'build', so
# when it's loaded we define a replacement command (see get_cmdclass).
if 'cx_Freeze' in sys.modules:  # cx_freeze enabled?
    from cx_Freeze.dist import build_exe as _build_exe

    class cmd_build_exe(_build_exe):
        # Freeze with a static _version.py, then restore the original
        # keyword-expanding template so the source tree stays usable.
        def run(self):
            versions = get_versions(verbose=True)
            target_versionfile = versionfile_source
            print("UPDATING %s" % target_versionfile)
            # swap in the short static version for the freeze
            os.unlink(target_versionfile)
            with open(target_versionfile, "w") as f:
                f.write(SHORT_VERSION_PY % versions)
            _build_exe.run(self)
            # put the long, keyword-expanding _version.py back afterwards
            os.unlink(target_versionfile)
            with open(versionfile_source, "w") as f:
                assert VCS is not None, "please set versioneer.VCS"
                LONG = LONG_VERSION_PY[VCS]
                f.write(LONG % {"DOLLAR": "$",
                                "TAG_PREFIX": tag_prefix,
                                "PARENTDIR_PREFIX": parentdir_prefix,
                                "VERSIONFILE_SOURCE": versionfile_source,
                                })
class cmd_sdist(_sdist):
    """'sdist' that records the computed version and ships a static
    _version.py inside the tarball."""
    def run(self):
        versions = get_versions(verbose=True)
        # stash for make_release_tree, which runs later in the same command
        self._versioneer_generated_versions = versions
        # unless we update this, the command will keep using the old version
        self.distribution.metadata.version = versions["version"]
        return _sdist.run(self)

    def make_release_tree(self, base_dir, files):
        _sdist.make_release_tree(self, base_dir, files)
        # now locate _version.py in the new base_dir directory (remembering
        # that it may be a hardlink) and replace it with an updated value
        target_versionfile = os.path.join(base_dir, versionfile_source)
        print("UPDATING %s" % target_versionfile)
        os.unlink(target_versionfile)
        with open(target_versionfile, "w") as f:
            f.write(SHORT_VERSION_PY % self._versioneer_generated_versions)
# Snippet appended to the package's __init__.py so that
# package.__version__ reflects the computed version at runtime.
INIT_PY_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""
class cmd_update_files(Command):
    """'setup.py versioneer': (re)install the versioneer support files —
    PKG/_version.py, the __init__.py snippet, MANIFEST.in includes and
    the VCS keyword-expansion configuration."""
    description = "install/upgrade Versioneer files: __init__.py SRC/_version.py"
    user_options = []
    boolean_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        print(" creating %s" % versionfile_source)
        # write the long, keyword-expanding _version.py template
        with open(versionfile_source, "w") as f:
            assert VCS is not None, "please set versioneer.VCS"
            LONG = LONG_VERSION_PY[VCS]
            f.write(LONG % {"DOLLAR": "$",
                            "TAG_PREFIX": tag_prefix,
                            "PARENTDIR_PREFIX": parentdir_prefix,
                            "VERSIONFILE_SOURCE": versionfile_source,
                            })
        # append the __version__ snippet to the package __init__.py,
        # unless it's already there
        ipy = os.path.join(os.path.dirname(versionfile_source), "__init__.py")
        if os.path.exists(ipy):
            try:
                with open(ipy, "r") as f:
                    old = f.read()
            except EnvironmentError:
                old = ""
            if INIT_PY_SNIPPET not in old:
                print(" appending to %s" % ipy)
                with open(ipy, "a") as f:
                    f.write(INIT_PY_SNIPPET)
            else:
                print(" %s unmodified" % ipy)
        else:
            print(" %s doesn't exist, ok" % ipy)
            ipy = None
        # Make sure both the top-level "versioneer.py" and versionfile_source
        # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
        # they'll be copied into source distributions. Pip won't be able to
        # install the package without this.
        manifest_in = os.path.join(get_root(), "MANIFEST.in")
        simple_includes = set()
        try:
            with open(manifest_in, "r") as f:
                for line in f:
                    if line.startswith("include "):
                        for include in line.split()[1:]:
                            simple_includes.add(include)
        except EnvironmentError:
            pass
        # That doesn't cover everything MANIFEST.in can do
        # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
        # it might give some false negatives. Appending redundant 'include'
        # lines is safe, though.
        if "versioneer.py" not in simple_includes:
            print(" appending 'versioneer.py' to MANIFEST.in")
            with open(manifest_in, "a") as f:
                f.write("include versioneer.py\n")
        else:
            print(" 'versioneer.py' already in MANIFEST.in")
        if versionfile_source not in simple_includes:
            print(" appending versionfile_source ('%s') to MANIFEST.in" %
                  versionfile_source)
            with open(manifest_in, "a") as f:
                f.write("include %s\n" % versionfile_source)
        else:
            print(" versionfile_source already in MANIFEST.in")
        # Make VCS-specific changes. For git, this means creating/changing
        # .gitattributes to mark _version.py for export-time keyword
        # substitution.
        do_vcs_install(manifest_in, versionfile_source, ipy)
def get_cmdclass():
    """Return the versioneer-aware distutils command classes."""
    cmds = {
        'version': cmd_version,
        'versioneer': cmd_update_files,
        'build': cmd_build,
        'sdist': cmd_sdist,
    }
    if 'cx_Freeze' in sys.modules:  # cx_freeze enabled?
        # cx_Freeze drives everything through build_exe, not build
        cmds['build_exe'] = cmd_build_exe
        del cmds['build']
    return cmds
pg8000-1.10.2/README.creole 0000664 0001750 0001750 00000006045 12502100102 015413 0 ustar tlocke tlocke 0000000 0000000 =pg8000
pg8000 is a pure-[[http://www.python.org/|Python]]
[[http://www.postgresql.org/|PostgreSQL]] driver that complies with
[[http://www.python.org/dev/peps/pep-0249/|DB-API 2.0]]. The driver
communicates with the database using the
[[http://www.postgresql.org/docs/9.3/static/protocol.html|PostgreSQL Backend / Frontend Protocol]].
CircleCI [[https://circleci.com/gh/mfenniak/pg8000|Build Status]]: {{https://circleci.com/gh/mfenniak/pg8000.png?style=badge|CircleCI}}
Links:
* [[http://pythonhosted.org/pg8000/|User Docs]].
* [[https://groups.google.com/forum/#!forum/pg8000|Forum]]
* [[https://github.com/mfenniak/pg8000|Code, bugs, feature requests etc.]]
=Regression Tests
To run the regression tests, install [[http://testrun.org/tox/latest/|tox]]:
{{{
pip install tox
}}}
then install all the supported Python versions (using the
[[https://launchpad.net/~fkrull/+archive/ubuntu/deadsnakes|APT Repository]] if
you're using Ubuntu). Install all the currently supported versions of PostgreSQL
(using the [[http://wiki.postgresql.org/wiki/Apt|APT Repository]] if you're
using Ubuntu). Then for each of them, enable the hstore extension by running the
SQL command:
{{{
create extension hstore;
}}}
and add a line to pg_hba.conf for the various authentication options, eg.
{{{
host pg8000_md5 all 127.0.0.1/32 md5
host pg8000_gss all 127.0.0.1/32 gss
host pg8000_password all 127.0.0.1/32 password
host all all 127.0.0.1/32 trust
}}}
Set the following environment variables for the databases, for example:
{{{
export PG8000_TEST_NAME="PG8000_TEST_9_3"
export PG8000_TEST_9_0="{'user': 'postgres', 'password': 'pw', 'port': 5432}"
export PG8000_TEST_9_1="{'user': 'postgres', 'password': 'pw', 'port': 5433}"
export PG8000_TEST_9_2="{'user': 'postgres', 'password': 'pw', 'port': 5434}"
export PG8000_TEST_9_3="{'user': 'postgres', 'password': 'pw', 'port': 5435}"
}}}
then run {{{tox}}} from the {{{pg8000}}} directory:
{{{
tox
}}}
Unfortunately, {{{tox}}} doesn't support Python 2.5, so to test CPython 2.5 and
Jython 2.5, run the {{{run_25}}} script.
==Performance Tests
To run the performance tests from the {{{pg8000}}} directory:
{{{
python -m pg8000.tests.performance
}}}
==Stress Test
There's a stress test that is run by doing:
{{{
python ./multi
}}}
The idea is to set {{{shared_buffers}}} in postgresql.conf to 128kB, and then
run the stress test, and you should get {{{no unpinned buffers}}} errors.
=Building The Documentation
The docs are written using [[http://sphinx-doc.org/|Sphinx]]. To build them,
install sphinx:
{{{
pip install sphinx
}}}
Then type:
{{{
python setup.py build_sphinx
}}}
and the docs will appear in {{{build/sphinx/html}}}.
=Doing A Release Of pg8000
Run {{{tox}}} and {{{run_25}}} to make sure all tests pass, then update
{{{doc/release_notes.rst}}} then do:
{{{
git tag -a x.y.z -m "Version x.y.z"
python setup.py register sdist bdist_wheel upload build_sphinx upload_docs
}}}
Then post a message to the forum.
pg8000-1.10.2/PKG-INFO 0000664 0001750 0001750 00000003462 12502130703 014372 0 ustar tlocke tlocke 0000000 0000000 Metadata-Version: 1.1
Name: pg8000
Version: 1.10.2
Summary: PostgreSQL interface library
Home-page: https://github.com/mfenniak/pg8000
Author: Mathieu Fenniak
Author-email: biziqe@mathieu.fenniak.net
License: BSD
Description:
pg8000
------
pg8000 is a Pure-Python interface to the PostgreSQL database engine. It is one of many PostgreSQL interfaces for the Python programming language. pg8000 is somewhat distinctive in that it is written entirely in Python and does not rely on any external libraries (such as a compiled python module, or PostgreSQL's libpq library). pg8000 supports the standard Python DB-API version 2.0.
pg8000's name comes from the belief that it is probably about the 8000th PostgreSQL interface for Python.
Keywords: postgresql dbapi
Platform: UNKNOWN
Classifier: Development Status :: 4 - Beta
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.5
Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.2
Classifier: Programming Language :: Python :: 3.3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: Implementation
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: Jython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Operating System :: OS Independent
Classifier: Topic :: Database :: Front-Ends
Classifier: Topic :: Software Development :: Libraries :: Python Modules
pg8000-1.10.2/setup.py 0000664 0001750 0001750 00000005200 12450024610 015000 0 ustar tlocke tlocke 0000000 0000000 #!/usr/bin/env python
import versioneer
# Configure versioneer before anything asks for a version: these
# module-level settings drive how the version string is computed.
versioneer.VCS = 'git'
versioneer.versionfile_source = 'pg8000/_version.py'
versioneer.versionfile_build = 'pg8000/_version.py'
versioneer.tag_prefix = ''                   # tags are plain "x.y.z"
versioneer.parentdir_prefix = 'pg8000-'      # sdists unpack to "pg8000-x.y.z"

from setuptools import setup

# PyPI long description (plain text)
long_description = """\
pg8000
------
pg8000 is a Pure-Python interface to the PostgreSQL database engine. It is \
one of many PostgreSQL interfaces for the Python programming language. pg8000 \
is somewhat distinctive in that it is written entirely in Python and does not \
rely on any external libraries (such as a compiled python module, or \
PostgreSQL's libpq library). pg8000 supports the standard Python DB-API \
version 2.0.
pg8000's name comes from the belief that it is probably about the 8000th \
PostgreSQL interface for Python."""

# versioneer's build/sdist/version commands, plus Sphinx docs if available
cmdclass = dict(versioneer.get_cmdclass())
try:
    from sphinx.setup_command import BuildDoc
    cmdclass['build_sphinx'] = BuildDoc
except ImportError:
    # Sphinx not installed: docs can't be built, everything else works
    pass

version=versioneer.get_version()

setup(
    name="pg8000",
    version=version,
    cmdclass=cmdclass,
    description="PostgreSQL interface library",
    long_description=long_description,
    author="Mathieu Fenniak",
    author_email="biziqe@mathieu.fenniak.net",
    url="https://github.com/mfenniak/pg8000",
    license="BSD",
    classifiers = [
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: BSD License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.5",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: Implementation",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: Jython",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Operating System :: OS Independent",
        "Topic :: Database :: Front-Ends",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ],
    keywords="postgresql dbapi",
    packages = ("pg8000",),
    # keep the Sphinx-built docs' version in step with the package version
    command_options={
        'build_sphinx': {
            'version': ('setup.py', version),
            'release': ('setup.py', version)}},
)
pg8000-1.10.2/setup.cfg 0000664 0001750 0001750 00000000110 12450024610 015102 0 ustar tlocke tlocke 0000000 0000000 [upload_docs]
upload-dir = build/sphinx/html
[bdist_wheel]
universal=1