DBUtils-Release-3_0_3/.bumpversion.cfg
[bumpversion]
current_version = 3.0.3
[bumpversion:file:setup.py]
search = __version__ = '{current_version}'
replace = __version__ = '{new_version}'
[bumpversion:file:dbutils/__init__.py]
search = __version__ = '{current_version}'
replace = __version__ = '{new_version}'
[bumpversion:file:README.md]
search = The current version {current_version}
replace = The current version {new_version}
[bumpversion:file:docs/main.rst]
search = :Version: {current_version}
replace = :Version: {new_version}
[bumpversion:file:docs/main.de.rst]
search = :Version: {current_version}
replace = :Version: {new_version}

DBUtils-Release-3_0_3/.codespellrc
[codespell]
skip = .git,.tox,.venv,*.de.html,*.de.rst,build,dist,local
quiet-level = 2

DBUtils-Release-3_0_3/.flake8
[flake8]
ignore = E722,W503
exclude = .git,.pytest_cache,.tox,.venv,.idea,__pycache__,build,dist,docs,local
max-line-length = 79

DBUtils-Release-3_0_3/.gitattributes
* text=auto eol=lf
*.bat text eol=crlf
*.config text eol=lf
*.css text eol=lf
*.html text eol=lf
*.js text eol=lf
*.prefs text
*.py text eol=lf
*.rst text eol=lf
*.sh text eol=lf
*.txt text eol=lf
*.po text eol=lf
*.pot text eol=lf
*.styl text eol=lf
*.xml text
*.gif binary
*.ico binary
*.jpg binary
*.lnk binary
*.mo binary
*.png binary
*.exe binary
*.so binary
*.ppt binary
*.pdf binary
*.gz binary
*.zip binary

DBUtils-Release-3_0_3/.github/workflows/publish_on_pypi.yml
name: Publish DBUtils on PyPI
on:
push:
tags:
- 'Release-*'
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
python: ['3.10']
steps:
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python }}
- name: Build source tarball
if: matrix.python == '3.10'
run: python setup.py sdist
- name: Build wheel
run: |
pip install wheel
python setup.py bdist_wheel
- name: Publish distribution to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:
user: __token__
password: ${{ secrets.PYPI_TOKEN }}

DBUtils-Release-3_0_3/.github/workflows/test_with_tox.yml
name: Test DBUtils using tox
on: [push, pull_request]
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
python: ['3.7', '3.8', '3.9', '3.10', '3.11']
steps:
- uses: actions/checkout@v3
- name: Setup Python ${{ matrix.python }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python }}
- run: pip install tox
- run: tox -e py
- if: matrix.python == '3.10'
run: TOXENV=flake8,manifest,docs,spell tox

DBUtils-Release-3_0_3/.gitignore
*~
*.bak
*.default
*.egg-info
*.log
*.patch
*.pid
*.pstats
*.pyc
*.pyo
*.swp
build
dist
local
.idea
.tox
.pytest_cache
test.bat
MANIFEST
Thumbs.db

DBUtils-Release-3_0_3/LICENSE
The MIT License (MIT)
Copyright (c) 2023 Christoph Zwerschke
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

DBUtils-Release-3_0_3/MANIFEST.in
include MANIFEST.in
include LICENSE
include README.md
include .bumpversion.cfg
include .codespellrc
include .flake8
include tox.ini
recursive-include tests *.py
recursive-include docs *.rst make.py *.html *.css *.png
prune docs/_build
global-exclude *.py[co] __pycache__

DBUtils-Release-3_0_3/README.md
DBUtils
=======
DBUtils is a suite of tools providing solid, persistent and pooled connections
to a database that can be used in all kinds of multi-threaded environments.
The suite supports DB-API 2 compliant database interfaces
and the classic PyGreSQL interface.
The current version 3.0.3 of DBUtils supports Python versions 3.6 to 3.11.
**Please have a look at the [changelog](https://webwareforpython.github.io/DBUtils/changelog.html), because there were some breaking changes in version 2.0.**
The DBUtils home page can be found at https://webwareforpython.github.io/DBUtils/
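
Here is a minimal usage sketch (assuming the `pgdb` module and a local
database named `mydb`; any other DB-API 2 compliant module works the
same way):

```python
import pgdb  # DB-API 2 module for PostgreSQL
from dbutils.pooled_db import PooledDB

pool = PooledDB(pgdb, 5, database='mydb')  # create 5 idle connections at startup
db = pool.connection()  # get a pooled connection
cur = db.cursor()
cur.execute('select version()')
print(cur.fetchone())
cur.close()
db.close()  # return the connection to the pool
```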

DBUtils-Release-3_0_3/dbutils/__init__.py
# DBUtils main package
__all__ = [
'__version__',
'simple_pooled_pg', 'steady_pg', 'pooled_pg', 'persistent_pg',
'simple_pooled_db', 'steady_db', 'pooled_db', 'persistent_db']
__version__ = '3.0.3'

DBUtils-Release-3_0_3/dbutils/persistent_db.py
"""PersistentDB - persistent DB-API 2 connections.
Implements steady, thread-affine persistent connections to a database
based on an arbitrary DB-API 2 compliant database interface module.
This should result in a speedup for persistent applications such as the
application server of "Webware for Python," without loss of robustness.
Robustness is provided by using "hardened" SteadyDB connections.
Even if the underlying database is restarted and all connections
are lost, they will be automatically and transparently reopened.
However, since you don't want this to happen in the middle of a database
transaction, you must explicitly start transactions with the begin()
method so that SteadyDB knows that the underlying connection shall not
be replaced and errors passed on until the transaction is completed.
Measures are taken to make the database connections thread-affine.
This means the same thread always uses the same cached connection,
and no other thread will use it. So even if the underlying DB-API module
is not thread-safe at the connection level this will be no problem here.
For best performance, the application server should keep threads persistent.
For this, you have to set MinServerThreads = MaxServerThreads in Webware.
For the Python DB-API 2 specification, see:
https://www.python.org/dev/peps/pep-0249/
For information on Webware for Python, see:
https://webwareforpython.github.io/w4py/
Usage:
First you need to set up a generator for your kind of database connections
by creating an instance of PersistentDB, passing the following parameters:
creator: either an arbitrary function returning new DB-API 2
connection objects or a DB-API 2 compliant database module
maxusage: the maximum number of reuses of a single connection
(the default of 0 or None means unlimited reuse)
Whenever the limit is reached, the connection will be reset.
setsession: an optional list of SQL commands that may serve to
prepare the session, e.g. ["set datestyle to german", ...].
failures: an optional exception class or a tuple of exception classes
for which the connection failover mechanism shall be applied,
if the default (OperationalError, InterfaceError, InternalError)
is not adequate for the used database module
ping: an optional flag controlling when connections are checked
with the ping() method if such a method is available
(0 = None = never, 1 = default = whenever it is requested,
2 = when a cursor is created, 4 = when a query is executed,
7 = always, and all other bit combinations of these values)
closeable: if this is set to true, then closing connections will
be allowed, but by default this will be silently ignored
threadlocal: an optional class for representing thread-local data
that will be used instead of our Python implementation
(threading.local is faster, but cannot be used in all cases)
The creator function or the connect function of the DB-API 2 compliant
database module specified as the creator will receive any additional
parameters such as the host, database, user, password etc. You may
choose some or all of these parameters in your own creator function,
allowing for sophisticated failover and load-balancing mechanisms.
For instance, if you are using pgdb as your DB-API 2 database module and want
every connection to your local database 'mydb' to be reused 1000 times:
import pgdb # import used DB-API 2 module
from dbutils.persistent_db import PersistentDB
persist = PersistentDB(pgdb, 1000, database='mydb')
Once you have set up the generator with these parameters, you can
request database connections of that kind:
db = persist.connection()
You can use these connections just as if they were ordinary
DB-API 2 connections. Actually what you get is the hardened
SteadyDB version of the underlying DB-API 2 connection.
Closing a persistent connection with db.close() will be silently
ignored since it would be reopened at the next usage anyway and
contrary to the intent of having persistent connections. Instead,
the connection will be automatically closed when the thread dies.
You can change this behavior by setting the closeable parameter.
Note that you need to explicitly start transactions by calling the
begin() method. This ensures that the transparent reopening will be
suspended until the end of the transaction, and that the connection
will be rolled back before being reused by the same thread.
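For example, a transaction with a persistent connection could look
like this (a minimal sketch; the table name is hypothetical):
db = persist.connection()
db.begin()  # suspend transparent reopening during the transaction
cur = db.cursor()
cur.execute("update account set balance = balance - 100 where id = 1")
cur.execute("update account set balance = balance + 100 where id = 2")
cur.close()
db.commit()  # end the transaction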
By setting the threadlocal parameter to threading.local, getting
connections may become a bit faster, but this may not work in all
environments (for instance, mod_wsgi is known to cause problems
since it clears the threading.local data between requests).
Ideas for improvement:
* Add a thread for monitoring, restarting (or closing) bad or expired
connections (similar to DBConnectionPool/ResourcePool by Warren Smith).
* Optionally log usage, bad connections and exceeding of limits.
Copyright, credits and license:
* Contributed as supplement for Webware for Python and PyGreSQL
by Christoph Zwerschke in September 2005
* Based on an idea presented on the Webware developer mailing list
by Geoffrey Talvola in July 2005
Licensed under the MIT license.
"""
from . import __version__
from .steady_db import connect
try:
# Prefer the pure Python version of threading.local.
# The C implementation turned out to be problematic with mod_wsgi,
# since it does not keep the thread-local data between requests.
from _threading_local import local
except ImportError:
# Fall back to the default version of threading.local.
from threading import local
class PersistentDBError(Exception):
"""General PersistentDB error."""
class NotSupportedError(PersistentDBError):
"""DB-API module not supported by PersistentDB."""
class PersistentDB:
"""Generator for persistent DB-API 2 connections.
After you have created the connection pool, you can use
connection() to get thread-affine, steady DB-API 2 connections.
"""
version = __version__
def __init__(
self, creator,
maxusage=None, setsession=None, failures=None, ping=1,
closeable=False, threadlocal=None, *args, **kwargs):
"""Set up the persistent DB-API 2 connection generator.
creator: either an arbitrary function returning new DB-API 2
connection objects or a DB-API 2 compliant database module
maxusage: maximum number of reuses of a single connection
(number of database operations, 0 or None means unlimited)
Whenever the limit is reached, the connection will be reset.
setsession: optional list of SQL commands that may serve to prepare
the session, e.g. ["set datestyle to ...", "set time zone ..."]
failures: an optional exception class or a tuple of exception classes
for which the connection failover mechanism shall be applied,
if the default (OperationalError, InterfaceError, InternalError)
is not adequate for the used database module
ping: determines when the connection should be checked with ping()
(0 = None = never, 1 = default = whenever it is requested,
2 = when a cursor is created, 4 = when a query is executed,
7 = always, and all other bit combinations of these values)
closeable: if this is set to true, then closing connections will
be allowed, but by default this will be silently ignored
threadlocal: an optional class for representing thread-local data
that will be used instead of our Python implementation
(threading.local is faster, but cannot be used in all cases)
args, kwargs: the parameters that shall be passed to the creator
function or the connection constructor of the DB-API 2 module
"""
try:
threadsafety = creator.threadsafety
except AttributeError:
try:
threadsafety = creator.dbapi.threadsafety
except AttributeError:
try:
if not callable(creator.connect):
raise AttributeError
except AttributeError:
threadsafety = 1
else:
threadsafety = 0
if not threadsafety:
raise NotSupportedError("Database module is not thread-safe.")
self._creator = creator
self._maxusage = maxusage
self._setsession = setsession
self._failures = failures
self._ping = ping
self._closeable = closeable
self._args, self._kwargs = args, kwargs
self.thread = (threadlocal or local)()
def steady_connection(self):
"""Get a steady, non-persistent DB-API 2 connection."""
return connect(
self._creator, self._maxusage, self._setsession,
self._failures, self._ping, self._closeable,
*self._args, **self._kwargs)
def connection(self, shareable=False):
"""Get a steady, persistent DB-API 2 connection.
The shareable parameter exists only for compatibility with the
PooledDB connection method. In reality, persistent connections
are of course never shared with other threads.
"""
try:
con = self.thread.connection
except AttributeError:
con = self.steady_connection()
if not con.threadsafety():
raise NotSupportedError("Database module is not thread-safe.")
self.thread.connection = con
con._ping_check()
return con
def dedicated_connection(self):
"""Alias for connection(shareable=False)."""
return self.connection()

DBUtils-Release-3_0_3/dbutils/persistent_pg.py
"""PersistentPg - persistent classic PyGreSQL connections.
Implements steady, thread-affine persistent connections to a PostgreSQL
database using the classic (not DB-API 2 compliant) PyGreSQL API.
This should result in a speedup for persistent applications such as the
application server of "Webware for Python," without loss of robustness.
Robustness is provided by using "hardened" SteadyPg connections.
Even if the underlying database is restarted and all connections
are lost, they will be automatically and transparently reopened.
However, since you don't want this to happen in the middle of a database
transaction, you must explicitly start transactions with the begin()
method so that SteadyPg knows that the underlying connection shall not
be replaced and errors passed on until the transaction is completed.
Measures are taken to make the database connections thread-affine.
This means the same thread always uses the same cached connection,
and no other thread will use it. So the fact that the classic PyGreSQL
pg module is not thread-safe at the connection level is no problem here.
For best performance, the application server should keep threads persistent.
For this, you have to set MinServerThreads = MaxServerThreads in Webware.
For more information on PostgreSQL, see:
https://www.postgresql.org/
For more information on PyGreSQL, see:
http://www.pygresql.org
For more information on Webware for Python, see:
https://webwareforpython.github.io/w4py/
Usage:
First you need to set up a generator for your kind of database connections
by creating an instance of PersistentPg, passing the following parameters:
maxusage: the maximum number of reuses of a single connection
(the default of 0 or None means unlimited reuse)
When this maximum usage number of the connection is reached,
the connection is automatically reset (closed and reopened).
setsession: An optional list of SQL commands that may serve to
prepare the session, e.g. ["set datestyle to german", ...]
closeable: if this is set to true, then closing connections will
be allowed, but by default this will be silently ignored
threadlocal: an optional class for representing thread-local data
that will be used instead of our Python implementation
(threading.local is faster, but cannot be used in all cases)
Additionally, you have to pass the parameters for the actual
PostgreSQL connection which are passed via PyGreSQL,
such as the names of the host, database, user, password etc.
For instance, if you want every connection to your local database 'mydb'
to be reused 1000 times:
from dbutils.persistent_pg import PersistentPg
persist = PersistentPg(1000, dbname='mydb')
Once you have set up the generator with these parameters, you can
request database connections of that kind:
db = persist.connection()
You can use these connections just as if they were ordinary
classic PyGreSQL API connections. Actually what you get is the
hardened SteadyPg version of a classic PyGreSQL connection.
Closing a persistent connection with db.close() will be silently
ignored since it would be reopened at the next usage anyway and
contrary to the intent of having persistent connections. Instead,
the connection will be automatically closed when the thread dies.
You can change this behavior by setting the closeable parameter.
Note that you need to explicitly start transactions by calling the
begin() method. This ensures that the transparent reopening will be
suspended until the end of the transaction, and that the connection
will be rolled back before being reused in the same thread. To end
transactions, use one of the end(), commit() or rollback() methods.
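For example, a transaction with a persistent PyGreSQL connection could
look like this (a minimal sketch; the table name is hypothetical):
db = persist.connection()
db.begin()  # suspend transparent reopening during the transaction
db.query("update account set balance = balance - 100 where id = 1")
db.query("update account set balance = balance + 100 where id = 2")
db.commit()  # or db.rollback() / db.end()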
By setting the threadlocal parameter to threading.local, getting
connections may become a bit faster, but this may not work in all
environments (for instance, mod_wsgi is known to cause problems
since it clears the threading.local data between requests).
Ideas for improvement:
* Add a thread for monitoring, restarting (or closing) bad or expired
connections (similar to DBConnectionPool/ResourcePool by Warren Smith).
* Optionally log usage, bad connections and exceeding of limits.
Copyright, credits and license:
* Contributed as supplement for Webware for Python and PyGreSQL
by Christoph Zwerschke in September 2005
* Based on an idea presented on the Webware developer mailing list
by Geoffrey Talvola in July 2005
Licensed under the MIT license.
"""
from . import __version__
from .steady_pg import SteadyPgConnection
try:
# Prefer the pure Python version of threading.local.
# The C implementation turned out to be problematic with mod_wsgi,
# since it does not keep the thread-local data between requests.
from _threading_local import local
except ImportError:
# Fall back to the default version of threading.local.
from threading import local
class PersistentPg:
"""Generator for persistent classic PyGreSQL connections.
After you have created the connection pool, you can use
connection() to get thread-affine, steady PostgreSQL connections.
"""
version = __version__
def __init__(
self, maxusage=None, setsession=None,
closeable=False, threadlocal=None, *args, **kwargs):
"""Set up the persistent PostgreSQL connection generator.
maxusage: maximum number of reuses of a single connection
(0 or None means unlimited reuse)
When this maximum usage number of the connection is reached,
the connection is automatically reset (closed and reopened).
setsession: optional list of SQL commands that may serve to prepare
the session, e.g. ["set datestyle to ...", "set time zone ..."]
closeable: if this is set to true, then closing connections will
be allowed, but by default this will be silently ignored
threadlocal: an optional class for representing thread-local data
that will be used instead of our Python implementation
(threading.local is faster, but cannot be used in all cases)
args, kwargs: the parameters that shall be used to establish
the PostgreSQL connections using the PyGreSQL pg.DB() class
"""
self._maxusage = maxusage
self._setsession = setsession
self._closeable = closeable
self._args, self._kwargs = args, kwargs
self.thread = (threadlocal or local)()
def steady_connection(self):
"""Get a steady, non-persistent PyGreSQL connection."""
return SteadyPgConnection(
self._maxusage, self._setsession, self._closeable,
*self._args, **self._kwargs)
def connection(self):
"""Get a steady, persistent PyGreSQL connection."""
try:
con = self.thread.connection
except AttributeError:
con = self.steady_connection()
self.thread.connection = con
return con

DBUtils-Release-3_0_3/dbutils/pooled_db.py
"""PooledDB - pooling for DB-API 2 connections.
Implements a pool of steady, thread-safe cached connections
to a database which are transparently reused,
using an arbitrary DB-API 2 compliant database interface module.
This should result in a speedup for persistent applications such as the
application server of "Webware for Python," without loss of robustness.
Robustness is provided by using "hardened" SteadyDB connections.
Even if the underlying database is restarted and all connections
are lost, they will be automatically and transparently reopened.
However, since you don't want this to happen in the middle of a database
transaction, you must explicitly start transactions with the begin()
method so that SteadyDB knows that the underlying connection shall not
be replaced and errors passed on until the transaction is completed.
Measures are taken to make the pool of connections thread-safe.
If the underlying DB-API module is thread-safe at the connection level,
the requested connections may be shared with other threads by default,
but you can also request dedicated connections in case you need them.
For the Python DB-API 2 specification, see:
https://www.python.org/dev/peps/pep-0249/
For information on Webware for Python, see:
https://webwareforpython.github.io/w4py/
Usage:
First you need to set up the database connection pool by creating
an instance of PooledDB, passing the following parameters:
creator: either an arbitrary function returning new DB-API 2
connection objects or a DB-API 2 compliant database module
mincached: the initial number of idle connections in the pool
(the default of 0 means no connections are made at startup)
maxcached: the maximum number of idle connections in the pool
(the default value of 0 or None means unlimited pool size)
maxshared: maximum number of shared connections allowed
(the default value of 0 or None means all connections are dedicated)
When this maximum number is reached, connections are
shared if they have been requested as shareable.
maxconnections: maximum number of connections generally allowed
(the default value of 0 or None means any number of connections)
blocking: determines behavior when exceeding the maximum
(if this is set to true, block and wait until the number of
connections decreases, but by default an error will be reported)
maxusage: maximum number of reuses of a single connection
(the default of 0 or None means unlimited reuse)
When this maximum usage number of the connection is reached,
the connection is automatically reset (closed and reopened).
setsession: an optional list of SQL commands that may serve to
prepare the session, e.g. ["set datestyle to german", ...]
reset: how connections should be reset when returned to the pool
(False or None to rollback transactions started with begin(),
the default value True always issues a rollback for safety's sake)
failures: an optional exception class or a tuple of exception classes
for which the connection failover mechanism shall be applied,
if the default (OperationalError, InterfaceError, InternalError)
is not adequate for the used database module
ping: an optional flag controlling when connections are checked
with the ping() method if such a method is available
(0 = None = never, 1 = default = whenever fetched from the pool,
2 = when a cursor is created, 4 = when a query is executed,
7 = always, and all other bit combinations of these values)
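For instance, the bit combination 3 = 1 | 2 checks connections both
whenever they are fetched from the pool and whenever a cursor is
created (an illustrative sketch, using the pgdb example below):
pool = PooledDB(pgdb, 5, ping=1 | 2, database='mydb')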
The creator function or the connect function of the DB-API 2 compliant
database module specified as the creator will receive any additional
parameters such as the host, database, user, password etc. You may
choose some or all of these parameters in your own creator function,
allowing for sophisticated failover and load-balancing mechanisms.
For instance, if you are using pgdb as your DB-API 2 database module and
want a pool of at least five connections to your local database 'mydb':
import pgdb # import used DB-API 2 module
from dbutils.pooled_db import PooledDB
pool = PooledDB(pgdb, 5, database='mydb')
Once you have set up the connection pool you can request
database connections from that pool:
db = pool.connection()
You can use these connections just as if they were ordinary
DB-API 2 connections. Actually what you get is the hardened
SteadyDB version of the underlying DB-API 2 connection.
Please note that the connection may be shared with other threads
by default if you set a non-zero maxshared parameter and the DB-API 2
module allows this. If you want to have a dedicated connection, use:
db = pool.connection(shareable=False)
You can also use this to get a dedicated connection:
db = pool.dedicated_connection()
If you don't need it any more, you should immediately return it to the
pool with db.close(). You can get another connection in the same way.
Warning: In a threaded environment, never do the following:
pool.connection().cursor().execute(...)
This would release the connection too early for reuse which may be
fatal if the connections are not thread-safe. Make sure that the
connection object stays alive as long as you are using it, like that:
db = pool.connection()
cur = db.cursor()
cur.execute(...)
res = cur.fetchone()
cur.close() # or del cur
db.close() # or del db
You can also use context managers for simpler code:
with pool.connection() as db:
with db.cursor() as cur:
cur.execute(...)
res = cur.fetchone()
Note that you need to explicitly start transactions by calling the
begin() method. This ensures that the connection will not be shared
with other threads, that the transparent reopening will be suspended
until the end of the transaction, and that the connection will be rolled
back before being given back to the connection pool.
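For example, an explicit transaction on a pooled connection could
look like this (a minimal sketch; the table name is hypothetical):
db = pool.connection()
db.begin()  # the connection is now dedicated, reopening is suspended
cur = db.cursor()
cur.execute("insert into log values ('start')")
cur.close()
db.commit()  # or db.rollback()
db.close()  # return the connection to the pool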
Ideas for improvement:
* Add a thread for monitoring, restarting (or closing) bad or expired
connections (similar to DBConnectionPool/ResourcePool by Warren Smith).
* Optionally log usage, bad connections and exceeding of limits.
Copyright, credits and license:
* Contributed as supplement for Webware for Python and PyGreSQL
by Christoph Zwerschke in September 2005
* Based on the code of DBPool, contributed to Webware for Python
by Dan Green in December 2000
Licensed under the MIT license.
"""
from threading import Condition
from . import __version__
from .steady_db import connect
class PooledDBError(Exception):
"""General PooledDB error."""
class InvalidConnection(PooledDBError):
"""Database connection is invalid."""
class NotSupportedError(PooledDBError):
"""DB-API module not supported by PooledDB."""
class TooManyConnections(PooledDBError):
"""Too many database connections were opened."""
class PooledDB:
"""Pool for DB-API 2 connections.
After you have created the connection pool, you can use
connection() to get pooled, steady DB-API 2 connections.
"""
version = __version__
def __init__(
self, creator, mincached=0, maxcached=0,
maxshared=0, maxconnections=0, blocking=False,
maxusage=None, setsession=None, reset=True,
failures=None, ping=1,
*args, **kwargs):
"""Set up the DB-API 2 connection pool.
creator: either an arbitrary function returning new DB-API 2
connection objects or a DB-API 2 compliant database module
mincached: initial number of idle connections in the pool
(0 means no connections are made at startup)
maxcached: maximum number of idle connections in the pool
(0 or None means unlimited pool size)
maxshared: maximum number of shared connections
(0 or None means all connections are dedicated)
When this maximum number is reached, connections are
shared if they have been requested as shareable.
maxconnections: maximum number of connections generally allowed
(0 or None means an arbitrary number of connections)
blocking: determines behavior when exceeding the maximum
(if this is set to true, block and wait until the number of
connections decreases, otherwise an error will be reported)
maxusage: maximum number of reuses of a single connection
(0 or None means unlimited reuse)
When this maximum usage number of the connection is reached,
the connection is automatically reset (closed and reopened).
setsession: optional list of SQL commands that may serve to prepare
the session, e.g. ["set datestyle to ...", "set time zone ..."]
reset: how connections should be reset when returned to the pool
(False or None to rollback transactions started with begin(),
True to always issue a rollback for safety's sake)
failures: an optional exception class or a tuple of exception classes
for which the connection failover mechanism shall be applied,
if the default (OperationalError, InterfaceError, InternalError)
is not adequate for the used database module
ping: determines when the connection should be checked with ping()
(0 = None = never, 1 = default = whenever fetched from the pool,
2 = when a cursor is created, 4 = when a query is executed,
7 = always, and all other bit combinations of these values)
args, kwargs: the parameters that shall be passed to the creator
function or the connection constructor of the DB-API 2 module
"""
try:
threadsafety = creator.threadsafety
except AttributeError:
try:
threadsafety = creator.dbapi.threadsafety
except AttributeError:
try:
if not callable(creator.connect):
raise AttributeError
except AttributeError:
threadsafety = 1
else:
threadsafety = 0
if not threadsafety:
raise NotSupportedError("Database module is not thread-safe.")
self._creator = creator
self._args, self._kwargs = args, kwargs
self._blocking = blocking
self._maxusage = maxusage
self._setsession = setsession
self._reset = reset
self._failures = failures
self._ping = ping
if mincached is None:
mincached = 0
if maxcached is None:
maxcached = 0
if maxconnections is None:
maxconnections = 0
if maxcached:
if maxcached < mincached:
maxcached = mincached
self._maxcached = maxcached
else:
self._maxcached = 0
if threadsafety > 1 and maxshared:
self._maxshared = maxshared
self._shared_cache = [] # the cache for shared connections
else:
self._maxshared = 0
if maxconnections:
if maxconnections < maxcached:
maxconnections = maxcached
if maxconnections < maxshared:
maxconnections = maxshared
self._maxconnections = maxconnections
else:
self._maxconnections = 0
self._idle_cache = [] # the actual pool of idle connections
self._lock = Condition()
self._connections = 0
# Establish an initial number of idle database connections:
idle = [self.dedicated_connection() for i in range(mincached)]
while idle:
idle.pop().close()
def steady_connection(self):
"""Get a steady, unpooled DB-API 2 connection."""
return connect(
self._creator, self._maxusage, self._setsession,
self._failures, self._ping, True, *self._args, **self._kwargs)
def connection(self, shareable=True):
"""Get a steady, cached DB-API 2 connection from the pool.
If shareable is set and the underlying DB-API 2 allows it,
then the connection may be shared with other threads.
"""
if shareable and self._maxshared:
with self._lock:
while (not self._shared_cache and self._maxconnections
and self._connections >= self._maxconnections):
self._wait_lock()
if len(self._shared_cache) < self._maxshared:
# shared cache is not full, get a dedicated connection
try: # first try to get it from the idle cache
con = self._idle_cache.pop(0)
except IndexError: # else get a fresh connection
con = self.steady_connection()
else:
con._ping_check() # check this connection
con = SharedDBConnection(con)
self._connections += 1
else: # shared cache full or no more connections allowed
self._shared_cache.sort() # least shared connection first
con = self._shared_cache.pop(0) # get it
while con.con._transaction:
# do not share connections which are in a transaction
self._shared_cache.insert(0, con)
self._wait_lock()
self._shared_cache.sort()
con = self._shared_cache.pop(0)
con.con._ping_check() # check the underlying connection
con.share() # increase share of this connection
# put the connection (back) into the shared cache
self._shared_cache.append(con)
self._lock.notify()
con = PooledSharedDBConnection(self, con)
else: # try to get a dedicated connection
with self._lock:
while (self._maxconnections
and self._connections >= self._maxconnections):
self._wait_lock()
# connection limit not reached, get a dedicated connection
try: # first try to get it from the idle cache
con = self._idle_cache.pop(0)
except IndexError: # else get a fresh connection
con = self.steady_connection()
else:
con._ping_check() # check connection
con = PooledDedicatedDBConnection(self, con)
self._connections += 1
return con
def dedicated_connection(self):
"""Alias for connection(shareable=False)."""
return self.connection(False)
def unshare(self, con):
"""Decrease the share of a connection in the shared cache."""
with self._lock:
con.unshare()
shared = con.shared
if not shared: # connection is idle,
try: # so try to remove it
self._shared_cache.remove(con) # from shared cache
except ValueError:
pass # pool has already been closed
if not shared: # connection has become idle,
self.cache(con.con) # so add it to the idle cache
def cache(self, con):
"""Put a dedicated connection back into the idle cache."""
with self._lock:
if not self._maxcached or len(self._idle_cache) < self._maxcached:
con._reset(force=self._reset) # rollback possible transaction
# the idle cache is not full, so put it there
self._idle_cache.append(con) # append it to the idle cache
else: # if the idle cache is already full,
con.close() # then close the connection
self._connections -= 1
self._lock.notify()
def close(self):
"""Close all connections in the pool."""
with self._lock:
while self._idle_cache: # close all idle connections
con = self._idle_cache.pop(0)
try:
con.close()
except Exception:
pass
if self._maxshared: # close all shared connections
while self._shared_cache:
con = self._shared_cache.pop(0).con
try:
con.close()
except Exception:
pass
self._connections -= 1
self._lock.notify_all()
def __del__(self):
"""Delete the pool."""
try:
self.close()
except: # builtin Exceptions might not exist any more
pass
def _wait_lock(self):
"""Wait until notified or report an error."""
if not self._blocking:
raise TooManyConnections
self._lock.wait()
# Auxiliary classes for pooled connections
class PooledDedicatedDBConnection:
"""Auxiliary proxy class for pooled dedicated connections."""
def __init__(self, pool, con):
"""Create a pooled dedicated connection.
pool: the corresponding PooledDB instance
con: the underlying SteadyDB connection
"""
# basic initialization to make finalizer work
self._con = None
# proper initialization of the connection
if not con.threadsafety():
raise NotSupportedError("Database module is not thread-safe.")
self._pool = pool
self._con = con
def close(self):
"""Close the pooled dedicated connection."""
# Instead of actually closing the connection,
# return it to the pool for future reuse.
if self._con:
self._pool.cache(self._con)
self._con = None
def __getattr__(self, name):
"""Proxy all members of the class."""
if self._con:
return getattr(self._con, name)
raise InvalidConnection
def __del__(self):
"""Delete the pooled connection."""
try:
self.close()
except: # builtin Exceptions might not exist any more
pass
def __enter__(self):
"""Enter a runtime context for the connection."""
return self
def __exit__(self, *exc):
"""Exit a runtime context for the connection."""
self.close()
class SharedDBConnection:
"""Auxiliary class for shared connections."""
def __init__(self, con):
"""Create a shared connection.
con: the underlying SteadyDB connection
"""
self.con = con
self.shared = 1
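# The comparison methods below order shared connections so that
# connections outside a transaction and with the fewest shares
# come first, allowing the pool to pick the best candidate.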
def __lt__(self, other):
if self.con._transaction == other.con._transaction:
return self.shared < other.shared
return not self.con._transaction
def __le__(self, other):
if self.con._transaction == other.con._transaction:
return self.shared <= other.shared
return not self.con._transaction
def __eq__(self, other):
return (self.con._transaction == other.con._transaction
and self.shared == other.shared)
def __ne__(self, other):
return not self.__eq__(other)
def __gt__(self, other):
return other.__lt__(self)
def __ge__(self, other):
return other.__le__(self)
def share(self):
"""Increase the share of this connection."""
self.shared += 1
def unshare(self):
"""Decrease the share of this connection."""
self.shared -= 1
class PooledSharedDBConnection:
"""Auxiliary proxy class for pooled shared connections."""
def __init__(self, pool, shared_con):
"""Create a pooled shared connection.
pool: the corresponding PooledDB instance
con: the underlying SharedDBConnection
"""
# basic initialization to make finalizer work
self._con = None
# proper initialization of the connection
con = shared_con.con
if not con.threadsafety() > 1:
raise NotSupportedError("Database connection is not thread-safe.")
self._pool = pool
self._shared_con = shared_con
self._con = con
def close(self):
"""Close the pooled shared connection."""
# Instead of actually closing the connection,
# unshare it and/or return it to the pool.
if self._con:
self._pool.unshare(self._shared_con)
self._shared_con = self._con = None
def __getattr__(self, name):
"""Proxy all members of the class."""
if self._con:
return getattr(self._con, name)
raise InvalidConnection
def __del__(self):
"""Delete the pooled connection."""
try:
self.close()
except: # builtin Exceptions might not exist any more
pass
def __enter__(self):
"""Enter a runtime context for the connection."""
return self
def __exit__(self, *exc):
"""Exit a runtime context for the connection."""
self.close()

DBUtils-Release-3_0_3/dbutils/pooled_pg.py
"""PooledPg - pooling for classic PyGreSQL connections.
Implements a pool of steady, thread-safe cached connections
to a PostgreSQL database which are transparently reused,
using the classic (not DB-API 2 compliant) PyGreSQL API.
This should result in a speedup for persistent applications such as the
application server of "Webware for Python," without loss of robustness.
Robustness is provided by using "hardened" SteadyPg connections.
Even if the underlying database is restarted and all connections
are lost, they will be automatically and transparently reopened.
However, since you don't want this to happen in the middle of a database
transaction, you must explicitly start transactions with the begin()
method so that SteadyPg knows that the underlying connection shall not
be replaced and errors passed on until the transaction is completed.
Measures are taken to make the pool of connections thread-safe
regardless of the fact that the classic PyGreSQL pg module itself
is not thread-safe at the connection level.
For more information on PostgreSQL, see:
https://www.postgresql.org/
For more information on PyGreSQL, see:
http://www.pygresql.org
For more information on Webware for Python, see:
https://webwareforpython.github.io/w4py/
Usage:
First you need to set up the database connection pool by creating
an instance of PooledPg, passing the following parameters:
mincached: the initial number of connections in the pool
(the default of 0 means no connections are made at startup)
maxcached: the maximum number of connections in the pool
(the default value of 0 or None means unlimited pool size)
maxconnections: maximum number of connections generally allowed
(the default value of 0 or None means any number of connections)
blocking: determines behavior when exceeding the maximum
(if this is set to true, block and wait until the number of
connections decreases, but by default an error will be reported)
maxusage: maximum number of reuses of a single connection
(the default of 0 or None means unlimited reuse)
When this maximum usage number of the connection is reached,
the connection is automatically reset (closed and reopened).
setsession: an optional list of SQL commands that may serve to
prepare the session, e.g. ["set datestyle to german", ...]
Additionally, you have to pass the parameters for the actual
PostgreSQL connection which are passed via PyGreSQL,
such as the names of the host, database, user, password etc.
For instance, if you want a pool of at least five connections
to your local database 'mydb':
from dbutils.pooled_pg import PooledPg
pool = PooledPg(5, dbname='mydb')
Once you have set up the connection pool you can request
database connections from that pool:
db = pool.connection()
You can use these connections just as if they were ordinary
classic PyGreSQL API connections. Actually what you get is a
proxy class for the hardened SteadyPg version of the connection.
The connection will not be shared with other threads. If you don't need
it any more, you should immediately return it to the pool with db.close().
You can get another connection in the same way or with db.reopen().
Warning: In a threaded environment, never do the following:
res = pool.connection().query(...).getresult()
This would release the connection too early for reuse which may be
fatal because the connections are not thread-safe. Make sure that the
connection object stays alive as long as you are using it, like that:
db = pool.connection()
res = db.query(...).getresult()
db.close() # or del db
You can also use a context manager for simpler code:
with pool.connection() as db:
res = db.query(...).getresult()
Note that you need to explicitly start transactions by calling the
begin() method. This ensures that the transparent reopening will be
suspended until the end of the transaction, and that the connection will
be rolled back before being given back to the connection pool. To end
transactions, use one of the end(), commit() or rollback() methods.
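For example, a transaction with a pooled PyGreSQL connection could
look like this (a minimal sketch; the table name is hypothetical):
db = pool.connection()
db.begin()  # suspend transparent reopening during the transaction
db.query("delete from log where level = 'debug'")
db.commit()  # or db.rollback() / db.end()
db.close()  # return the connection to the pool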
Ideas for improvement:
* Add a thread for monitoring, restarting (or closing) bad or expired
connections (similar to DBConnectionPool/ResourcePool by Warren Smith).
* Optionally log usage, bad connections and exceeding of limits.
Copyright, credits and license:
* Contributed as supplement for Webware for Python and PyGreSQL
by Christoph Zwerschke in September 2005
* Based on the code of DBPool, contributed to Webware for Python
by Dan Green in December 2000
Licensed under the MIT license.
"""
from queue import Queue, Empty, Full
from . import __version__
from .steady_pg import SteadyPgConnection
class PooledPgError(Exception):
"""General PooledPg error."""
class InvalidConnection(PooledPgError):
"""Database connection is invalid."""
class TooManyConnections(PooledPgError):
"""Too many database connections were opened."""
class PooledPg:
"""Pool for classic PyGreSQL connections.
After you have created the connection pool, you can use
connection() to get pooled, steady PostgreSQL connections.
"""
version = __version__
def __init__(
self, mincached=0, maxcached=0,
maxconnections=0, blocking=False,
maxusage=None, setsession=None, reset=None,
*args, **kwargs):
"""Set up the PostgreSQL connection pool.
mincached: initial number of connections in the pool
(0 means no connections are made at startup)
maxcached: maximum number of connections in the pool
(0 or None means unlimited pool size)
maxconnections: maximum number of connections generally allowed
(0 or None means an arbitrary number of connections)
blocking: determines behavior when exceeding the maximum
(if this is set to true, block and wait until the number of
connections decreases, otherwise an error will be reported)
maxusage: maximum number of reuses of a single connection
(0 or None means unlimited reuse)
When this maximum usage number of the connection is reached,
the connection is automatically reset (closed and reopened).
setsession: optional list of SQL commands that may serve to prepare
the session, e.g. ["set datestyle to ...", "set time zone ..."]
reset: how connections should be reset when returned to the pool
(0 or None to rollback transactions started with begin(),
1 to always issue a rollback, 2 for a complete reset)
args, kwargs: the parameters that shall be used to establish
the PostgreSQL connections using the PyGreSQL pg.DB() class
"""
self._args, self._kwargs = args, kwargs
self._maxusage = maxusage
self._setsession = setsession
self._reset = reset or 0
if mincached is None:
mincached = 0
if maxcached is None:
maxcached = 0
if maxconnections is None:
maxconnections = 0
if maxcached:
if maxcached < mincached:
maxcached = mincached
if maxconnections:
if maxconnections < maxcached:
maxconnections = maxcached
# Create semaphore for number of allowed connections generally:
from threading import Semaphore
self._connections = Semaphore(maxconnections)
self._blocking = blocking
else:
self._connections = None
self._cache = Queue(maxcached) # the actual connection pool
# Establish an initial number of database connections:
idle = [self.connection() for i in range(mincached)]
while idle:
idle.pop().close()
def steady_connection(self):
"""Get a steady, unpooled PostgreSQL connection."""
return SteadyPgConnection(self._maxusage, self._setsession, True,
*self._args, **self._kwargs)
def connection(self):
"""Get a steady, cached PostgreSQL connection from the pool."""
if self._connections:
if not self._connections.acquire(self._blocking):
raise TooManyConnections
try:
con = self._cache.get(0)
except Empty:
con = self.steady_connection()
return PooledPgConnection(self, con)
def cache(self, con):
"""Put a connection back into the pool cache."""
try:
if self._reset == 2:
con.reset() # reset the connection completely
else:
if self._reset or con._transaction:
try:
con.rollback() # rollback a possible transaction
except Exception:
pass
self._cache.put(con, 0) # and then put it back into the cache
except Full:
con.close()
if self._connections:
self._connections.release()
def close(self):
"""Close all connections in the pool."""
while 1:
try:
con = self._cache.get(0)
try:
con.close()
except Exception:
pass
if self._connections:
self._connections.release()
except Empty:
break
def __del__(self):
"""Delete the pool."""
try:
self.close()
except: # builtin Exceptions might not exist any more
pass
# Auxiliary class for pooled connections
class PooledPgConnection:
"""Proxy class for pooled PostgreSQL connections."""
def __init__(self, pool, con):
"""Create a pooled DB-API 2 connection.
pool: the corresponding PooledPg instance
con: the underlying SteadyPg connection
"""
self._pool = pool
self._con = con
def close(self):
"""Close the pooled connection."""
# Instead of actually closing the connection,
# return it to the pool so it can be reused.
if self._con:
self._pool.cache(self._con)
self._con = None
def reopen(self):
"""Reopen the pooled connection."""
# If the connection is already back in the pool,
# get another connection from the pool,
# otherwise reopen the underlying connection.
if self._con:
self._con.reopen()
else:
self._con = self._pool.connection()
def __getattr__(self, name):
"""Proxy all members of the class."""
if self._con:
return getattr(self._con, name)
raise InvalidConnection
def __del__(self):
"""Delete the pooled connection."""
try:
self.close()
except: # builtin Exceptions might not exist any more
pass
def __enter__(self):
"""Enter a runtime context for the connection."""
return self
def __exit__(self, *exc):
"""Exit a runtime context for the connection."""
self.close()

DBUtils-Release-3_0_3/dbutils/simple_pooled_db.py
"""SimplePooledDB - a very simple DB-API 2 database connection pool.
Implements a pool of threadsafe cached DB-API 2 connections
to a database which are transparently reused.
This should result in a speedup for persistent applications
such as the "Webware for Python" AppServer.
For more information on the DB-API 2, see:
https://www.python.org/dev/peps/pep-0249/
For more information on Webware for Python, see:
https://webwareforpython.github.io/w4py/
Measures are taken to make the pool of connections threadsafe
regardless of whether the DB-API 2 module used is threadsafe
on the connection level (threadsafety > 1) or not. It must only
be threadsafe on the module level (threadsafety = 1). If the
DB-API 2 module is threadsafe, the connections will be shared
between threads (keep this in mind if you use transactions).
Usage:
The idea behind SimplePooledDB is that it's completely transparent.
After you have established your connection pool, stating the
DB-API 2 module to be used, the number of connections
to be cached in the pool and the connection parameters, e.g.
import pgdb # import used DB-API 2 module
from dbutils.simple_pooled_db import PooledDB
dbpool = PooledDB(pgdb, 5, host=..., database=..., user=..., ...)
you can demand database connections from that pool,
db = dbpool.connection()
and use them just as if they were ordinary DB-API 2 connections.
It's really just a proxy class.
db.close() will return the connection to the pool; it will not
actually close it. This is so your existing code works nicely.
Ideas for improvement:
* Do not create the maximum number of connections on startup
already, but only a certain number and the rest on demand.
* Detect and transparently reset "bad" connections.
* Connections should have some sort of maximum usage limit
after which they should be automatically closed and reopened.
* Prefer or enforce thread-affinity for the connections,
allowing for both shareable and non-shareable connections.
Please note that these and other ideas have already been
implemented in PooledDB, a more sophisticated version
of SimplePooledDB. You might also consider using PersistentDB
instead for thread-affine persistent database connections.
SimplePooledDB may still serve as a very simple reference
and example implementation for developers.
Copyright, credits and license:
* Contributed as MiscUtils/DBPool for Webware for Python
by Dan Green, December 2000
* Thread safety bug found by Tom Schwaller
* Fixes by Geoff Talvola (thread safety in _threadsafe_getConnection())
* Clean up by Chuck Esterbrook
* Fix unthreadsafe functions which were leaking, Jay Love
* Eli Green's webware-discuss comments were lifted for additional docs
* Clean-up and detailed commenting, rename and move to DBUtils
by Christoph Zwerschke in September 2005
Licensed under the MIT license.
"""
from . import __version__
class PooledDBError(Exception):
"""General PooledDB error."""
class NotSupportedError(PooledDBError):
"""DB-API module not supported by PooledDB."""
class PooledDBConnection:
"""A proxy class for pooled database connections.
You don't normally deal with this class directly,
but use PooledDB to get new connections.
"""
def __init__(self, pool, con):
self._con = con
self._pool = pool
def close(self):
"""Close the pooled connection."""
# Instead of actually closing the connection,
# return it to the pool so it can be reused.
if self._con is not None:
self._pool.returnConnection(self._con)
self._con = None
def __getattr__(self, name):
# All other members are the same.
return getattr(self._con, name)
def __del__(self):
self.close()
class PooledDB:
"""A very simple database connection pool.
After you have created the connection pool,
you can get connections using getConnection().
"""
version = __version__
def __init__(self, dbapi, maxconnections, *args, **kwargs):
"""Set up the database connection pool.
dbapi: the DB-API 2 compliant module you want to use
maxconnections: the number of connections cached in the pool
args, kwargs: the parameters that shall be used to establish
the database connections using connect()
"""
try:
threadsafety = dbapi.threadsafety
except Exception:
threadsafety = None
if threadsafety == 0:
raise NotSupportedError(
"Database module does not support any level of threading.")
if threadsafety == 1:
# If there is no connection level safety, build
# the pool using the synchronized queue class
# that implements all the required locking semantics.
from queue import Queue
self._queue = Queue(maxconnections) # create the queue
self.connection = self._unthreadsafe_get_connection
self.addConnection = self._unthreadsafe_add_connection
self.returnConnection = self._unthreadsafe_return_connection
elif threadsafety in (2, 3):
# If there is connection level safety, implement the
# pool with an ordinary list used as a circular buffer.
# We only need a minimum of locking in this case.
from threading import Lock
self._lock = Lock() # create a lock object to be used later
self._nextConnection = 0 # index of the next connection to be used
self._connections = [] # the list of connections
self.connection = self._threadsafe_get_connection
self.addConnection = self._threadsafe_add_connection
self.returnConnection = self._threadsafe_return_connection
else:
raise NotSupportedError(
"Database module threading support cannot be determined.")
# Establish all database connections (it would be better to
# only establish a part of them now, and the rest on demand).
for i in range(maxconnections):
self.addConnection(dbapi.connect(*args, **kwargs))
# The following functions are used with DB-API 2 modules
# that do not have connection level threadsafety, like PyGreSQL.
# However, the module must be threadsafe at the module level.
# Note: threadsafe/unthreadsafe refers to the DB-API 2 module,
# not to this class which should be threadsafe in any case.
def _unthreadsafe_get_connection(self):
"""Get a connection from the pool."""
return PooledDBConnection(self, self._queue.get())
def _unthreadsafe_add_connection(self, con):
"""Add a connection to the pool."""
self._queue.put(con)
def _unthreadsafe_return_connection(self, con):
"""Return a connection to the pool.
In this case, the connections need to be put
back into the queue after they have been used.
This is done automatically when the connection is closed
and should never be called explicitly outside of this module.
"""
self._unthreadsafe_add_connection(con)
# The following functions are used with DB-API 2 modules
# that are threadsafe at the connection level, like psycopg.
# Note: In this case, connections are shared between threads.
# This may lead to problems if you use transactions.
def _threadsafe_get_connection(self):
"""Get a connection from the pool."""
with self._lock:
next_con = self._nextConnection
con = PooledDBConnection(self, self._connections[next_con])
next_con += 1
if next_con >= len(self._connections):
next_con = 0
self._nextConnection = next_con
return con
def _threadsafe_add_connection(self, con):
"""Add a connection to the pool."""
self._connections.append(con)
def _threadsafe_return_connection(self, con):
"""Return a connection to the pool.
In this case, the connections always stay in the pool,
so there is no need to do anything here.
"""
pass

DBUtils-Release-3_0_3/dbutils/simple_pooled_pg.py
"""SimplePooledPg - a very simple classic PyGreSQL connection pool.
Implements a pool of threadsafe cached connections
to a PostgreSQL database which are transparently reused,
using the classic (not DB-API 2 compliant) PyGreSQL pg API.
This should result in a speedup for persistent applications
such as the "Webware for Python" AppServer.
For more information on PostgreSQL, see:
https://www.postgresql.org/
For more information on PyGreSQL, see:
http://www.pygresql.org
For more information on Webware for Python, see:
https://webwareforpython.github.io/w4py/
Measures are taken to make the pool of connections threadsafe
regardless of the fact that the PyGreSQL pg module itself is
not threadsafe at the connection level. Connections will never be
shared between threads, so you can safely use transactions.
Usage:
The idea behind SimplePooledPg is that it's completely transparent.
After you have established your connection pool, stating the
number of connections to be cached in the pool and the
connection parameters, e.g.
from dbutils.simple_pooled_pg import PooledPg
dbpool = PooledPg(5, host=..., database=..., user=..., ...)
you can demand database connections from that pool,
db = dbpool.connection()
and use them just as if they were ordinary PyGreSQL pg API
connections. It's really just a proxy class.
db.close() will return the connection to the pool; it will not
actually close it. This is done so that your existing code keeps
working nicely.
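For example, a complete round trip could look like this (just a
sketch, assuming a local database named "testdb"):

dbpool = PooledPg(5, dbname='testdb')
db = dbpool.connection()
print(db.query('select version()'))
db.close()  # this returns the connection to the pool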
Ideas for improvement:
* Do not create the maximum number of connections on startup
already, but only a certain number and the rest on demand.
* Detect and transparently reset "bad" connections. The PyGreSQL
pg API provides a status attribute and a reset() method for that.
* Connections should have some sort of "maximum usage limit"
after which they should be automatically closed and reopened.
* Prefer or enforce thread affinity for the connections.
Please note that these and other ideas have already been
implemented in PooledPg, a more sophisticated version
of SimplePooledPg. You might also consider using PersistentPg
instead for thread-affine persistent PyGreSQL connections.
SimplePooledPg may still serve as a very simple reference
and example implementation for developers.
Copyright, credits and license:
* Contributed as supplement for Webware for Python and PyGreSQL
by Christoph Zwerschke in September 2005
* Based on the code of DBPool, contributed to Webware for Python
by Dan Green in December 2000
Licensed under the MIT license.
"""
from pg import DB as PgConnection
from . import __version__
class PooledPgConnection:
"""A proxy class for pooled PostgreSQL connections.
You don't normally deal with this class directly,
but use PooledPg to get new connections.
"""
def __init__(self, pool, con):
self._con = con
self._pool = pool
def close(self):
"""Close the pooled connection."""
# Instead of actually closing the connection,
# return it to the pool so it can be reused.
if self._con is not None:
self._pool.cache(self._con)
self._con = None
def __getattr__(self, name):
# All other members are the same.
return getattr(self._con, name)
def __del__(self):
self.close()
class PooledPg:
"""A very simple PostgreSQL connection pool.
After you have created the connection pool,
you can get connections using connection().
"""
version = __version__
def __init__(self, maxconnections, *args, **kwargs):
"""Set up the PostgreSQL connection pool.
maxconnections: the number of connections cached in the pool
args, kwargs: the parameters that shall be used to establish
the PostgreSQL connections using pg.connect()
"""
# Since there is no connection level safety, we
# build the pool using the synchronized queue class
# that implements all the required locking semantics.
from queue import Queue
self._queue = Queue(maxconnections)
# Establish all database connections (it would be better to
# only establish a part of them now, and the rest on demand).
for i in range(maxconnections):
self.cache(PgConnection(*args, **kwargs))
def cache(self, con):
"""Add or return a connection to the pool."""
self._queue.put(con)
def connection(self):
"""Get a connection from the pool."""
return PooledPgConnection(self, self._queue.get())
DBUtils-Release-3_0_3/dbutils/steady_db.py 0000664 0000000 0000000 00000064555 14422527151 0020543 0 ustar 00root root 0000000 0000000 """SteadyDB - hardened DB-API 2 connections.
Implements steady connections to a database based on an
arbitrary DB-API 2 compliant database interface module.
The connections are transparently reopened when they are
closed or the database connection has been lost or when
they are used more often than an optional usage limit.
Database cursors are transparently reopened as well when
the execution of a database operation cannot be performed
due to a lost connection. Only if the connection is lost
after the execution, when rows have already been fetched
from the database, will this raise an error; the cursor will
not be reopened automatically in that case, because there is
no reliable way to recover the state of the cursor in such
a situation.
Connections which have been marked as being in a transaction
with a begin() call will not be silently replaced either.
A typical situation where database connections are lost
is when the database server or an intervening firewall is
shut down and restarted for maintenance reasons. In such a
case, all database connections would become unusable, even
though the database service may be already available again.
The "hardened" connections provided by this module will
make the database connections immediately available again.
This approach results in a steady database connection that
can be used by PooledDB or PersistentDB to create pooled or
persistent connections to a database in a threaded environment
such as the application server of "Webware for Python."
Note, however, that the connections themselves may not be
thread-safe (depending on the used DB-API module).
For the Python DB-API 2 specification, see:
https://www.python.org/dev/peps/pep-0249/
For information on Webware for Python, see:
https://webwareforpython.github.io/w4py/
Usage:
You can use the connection constructor connect() in the same
way as you would use the connection constructor of a DB-API 2
module if you specify the DB-API 2 module to be used as the
first parameter, or alternatively you can specify an arbitrary
constructor function returning new DB-API 2 compliant connection
objects as the first parameter. Passing just a function allows
implementing failover mechanisms and load balancing strategies.
You may also specify a usage limit as the second parameter
(set it to None if you prefer unlimited usage), an optional
list of commands that may serve to prepare the session as a
third parameter, the exception classes for which the failover
mechanism shall be applied, and you can specify whether it is
allowed to close the connection (by default this is true).
When the connection to the database is lost or has been used
too often, it will be transparently reset in most situations,
without further notice.
import pgdb # import used DB-API 2 module
from dbutils.steady_db import connect
db = connect(pgdb, 10000, ["set datestyle to german"],
host=..., database=..., user=..., ...)
...
cursor = db.cursor()
...
cursor.execute('select ...')
result = cursor.fetchall()
...
cursor.close()
...
db.close()
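Alternatively, you can pass a creator function instead of a module
(just a sketch, assuming the pgdb module and the same parameters):

def creator():
    return pgdb.connect(host=..., database=..., user=...)
db = connect(creator, 10000, ["set datestyle to german"])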
Ideas for improvement:
* Alternatively to the maximum number of uses,
implement a maximum time to live for connections.
* Optionally log usage and loss of connection.
Copyright, credits and license:
* Contributed as supplement for Webware for Python and PyGreSQL
by Christoph Zwerschke in September 2005
* Allowing creator functions as first parameter as in SQLAlchemy
suggested by Ezio Vernacotola in December 2006
Licensed under the MIT license.
"""
import sys
from . import __version__
class SteadyDBError(Exception):
"""General SteadyDB error."""
class InvalidCursor(SteadyDBError):
"""Database cursor is invalid."""
def connect(
creator, maxusage=None, setsession=None,
failures=None, ping=1, closeable=True, *args, **kwargs):
"""A tough version of the connection constructor of a DB-API 2 module.
creator: either an arbitrary function returning new DB-API 2 compliant
connection objects or a DB-API 2 compliant database module
maxusage: maximum usage limit for the underlying DB-API 2 connection
(number of database operations, 0 or None means unlimited usage)
callproc(), execute() and executemany() count as one operation.
When the limit is reached, the connection is automatically reset.
setsession: an optional list of SQL commands that may serve to prepare
the session, e.g. ["set datestyle to german", "set time zone mez"]
failures: an optional exception class or a tuple of exception classes
for which the failover mechanism shall be applied, if the default
(OperationalError, InterfaceError, InternalError) is not adequate
for the used database module
ping: determines when the connection should be checked with ping()
(0 = None = never, 1 = default = when _ping_check() is called,
2 = whenever a cursor is created, 4 = when a query is executed,
7 = always, and all other bit combinations of these values)
closeable: if this is set to false, then closing the connection will
be silently ignored, but by default the connection can be closed
args, kwargs: the parameters that shall be passed to the creator
function or the connection constructor of the DB-API 2 module
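For example, ping=6 would check the connection both whenever a
cursor is created (2) and whenever a query is executed (4), since
2 | 4 == 6, but not on plain _ping_check() calls with the default
argument (1).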
"""
return SteadyDBConnection(
creator, maxusage, setsession,
failures, ping, closeable, *args, **kwargs)
class SteadyDBConnection:
"""A "tough" version of DB-API 2 connections."""
version = __version__
def __init__(
self, creator, maxusage=None, setsession=None,
failures=None, ping=1, closeable=True, *args, **kwargs):
"""Create a "tough" DB-API 2 connection."""
# basic initialization to make finalizer work
self._con = None
self._closed = True
# proper initialization of the connection
try:
self._creator = creator.connect
try:
if creator.dbapi.connect:
self._dbapi = creator.dbapi
except AttributeError:
self._dbapi = creator
except AttributeError:
# try finding the DB-API 2 module via the connection creator
self._creator = creator
try:
self._dbapi = creator.dbapi
except AttributeError:
try:
self._dbapi = sys.modules[creator.__module__]
if self._dbapi.connect != creator:
raise AttributeError
except (AttributeError, KeyError):
self._dbapi = None
try:
self._threadsafety = creator.threadsafety
except AttributeError:
try:
self._threadsafety = self._dbapi.threadsafety
except AttributeError:
self._threadsafety = None
if not callable(self._creator):
raise TypeError(f"{creator!r} is not a connection provider.")
if maxusage is None:
maxusage = 0
if not isinstance(maxusage, int):
raise TypeError("'maxusage' must be an integer value.")
self._maxusage = maxusage
self._setsession_sql = setsession
if failures is not None and not isinstance(
failures, tuple) and not issubclass(failures, Exception):
raise TypeError("'failures' must be a tuple of exceptions.")
self._failures = failures
self._ping = ping if isinstance(ping, int) else 0
self._closeable = closeable
self._args, self._kwargs = args, kwargs
self._store(self._create())
def __enter__(self):
"""Enter the runtime context for the connection object."""
return self
def __exit__(self, *exc):
"""Exit the runtime context for the connection object.
This does not close the connection, but it ends a transaction.
"""
if exc[0] is None and exc[1] is None and exc[2] is None:
self.commit()
else:
self.rollback()
def _create(self):
"""Create a new connection using the creator function."""
con = self._creator(*self._args, **self._kwargs)
try:
try:
if self._dbapi.connect != self._creator:
raise AttributeError
except AttributeError:
# try finding the DB-API 2 module via the connection itself
try:
mod = con.__module__
except AttributeError:
mod = None
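# Walk the module path from the most specific to the most
# general package (e.g. from a.b.c via a.b to a) until a
# module with a callable connect() function is found.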
while mod:
try:
self._dbapi = sys.modules[mod]
if not callable(self._dbapi.connect):
raise AttributeError
except (AttributeError, KeyError):
pass
else:
break
i = mod.rfind('.')
if i < 0:
mod = None
else:
mod = mod[:i]
else:
try:
mod = con.OperationalError.__module__
except AttributeError:
mod = None
while mod:
try:
self._dbapi = sys.modules[mod]
if not callable(self._dbapi.connect):
raise AttributeError
except (AttributeError, KeyError):
pass
else:
break
i = mod.rfind('.')
if i < 0:
mod = None
else:
mod = mod[:i]
else:
self._dbapi = None
if self._threadsafety is None:
try:
self._threadsafety = self._dbapi.threadsafety
except AttributeError:
try:
self._threadsafety = con.threadsafety
except AttributeError:
pass
if self._failures is None:
try:
self._failures = (
self._dbapi.OperationalError,
self._dbapi.InterfaceError,
self._dbapi.InternalError)
except AttributeError:
try:
self._failures = (
self._creator.OperationalError,
self._creator.InterfaceError,
self._creator.InternalError)
except AttributeError:
try:
self._failures = (
con.OperationalError,
con.InterfaceError,
con.InternalError)
except AttributeError:
raise AttributeError(
"Could not determine failure exceptions"
" (please set failures or creator.dbapi).")
if isinstance(self._failures, tuple):
self._failure = self._failures[0]
else:
self._failure = self._failures
self._setsession(con)
except Exception as error:
# the database module could not be determined
# or the session could not be prepared
try: # close the connection first
con.close()
except Exception:
pass
raise error # re-raise the original error again
return con
def _setsession(self, con=None):
"""Execute the SQL commands for session preparation."""
if con is None:
con = self._con
if self._setsession_sql:
cursor = con.cursor()
for sql in self._setsession_sql:
cursor.execute(sql)
cursor.close()
def _store(self, con):
"""Store a database connection for subsequent use."""
self._con = con
self._transaction = False
self._closed = False
self._usage = 0
def _close(self):
"""Close the tough connection.
You can always close a tough connection with this method
and it will not complain if you close it more than once.
"""
if not self._closed:
try:
self._con.close()
except Exception:
pass
self._transaction = False
self._closed = True
def _reset(self, force=False):
"""Reset a tough connection.
Rollback if forced or the connection was in a transaction.
"""
if not self._closed and (force or self._transaction):
try:
self.rollback()
except Exception:
pass
def _ping_check(self, ping=1, reconnect=True):
"""Check whether the connection is still alive using ping().
If the underlying connection is not active and the ping
parameter is set accordingly, the connection will be recreated
unless the connection is currently inside a transaction.
"""
if ping & self._ping:
try: # if possible, ping the connection
try: # pass a reconnect=False flag if this is supported
alive = self._con.ping(False)
except TypeError: # the reconnect flag is not supported
alive = self._con.ping()
except (AttributeError, IndexError, TypeError, ValueError):
self._ping = 0 # ping() is not available
alive = None
reconnect = False
except Exception:
alive = False
else:
if alive is None:
alive = True
if alive:
reconnect = False
if reconnect and not self._transaction:
try: # try to reopen the connection
con = self._create()
except Exception:
pass
else:
self._close()
self._store(con)
alive = True
return alive
def dbapi(self):
"""Return the underlying DB-API 2 module of the connection."""
if self._dbapi is None:
raise AttributeError(
"Could not determine DB-API 2 module"
" (please set creator.dbapi).")
return self._dbapi
def threadsafety(self):
"""Return the thread safety level of the connection."""
if self._threadsafety is None:
if self._dbapi is None:
raise AttributeError(
"Could not determine threadsafety"
" (please set creator.dbapi or creator.threadsafety).")
return 0
return self._threadsafety
def close(self):
"""Close the tough connection.
You are allowed to close a tough connection by default
and it will not complain if you close it more than once.
You can disallow closing connections by setting
the closeable parameter to something false. In this case,
closing tough connections will be silently ignored.
"""
if self._closeable:
self._close()
elif self._transaction:
self._reset()
def begin(self, *args, **kwargs):
"""Indicate the beginning of a transaction.
During a transaction, connections won't be transparently
replaced, and all errors will be raised to the application.
If the underlying driver supports this method, it will be called
with the given parameters (e.g. for distributed transactions).
"""
self._transaction = True
try:
begin = self._con.begin
except AttributeError:
pass
else:
begin(*args, **kwargs)
def commit(self):
"""Commit any pending transaction."""
self._transaction = False
try:
self._con.commit()
except self._failures as error: # cannot commit
try: # try to reopen the connection
con = self._create()
except Exception:
pass
else:
self._close()
self._store(con)
raise error # re-raise the original error
def rollback(self):
"""Rollback pending transaction."""
self._transaction = False
try:
self._con.rollback()
except self._failures as error: # cannot rollback
try: # try to reopen the connection
con = self._create()
except Exception:
pass
else:
self._close()
self._store(con)
raise error # re-raise the original error
def cancel(self):
"""Cancel a long-running transaction.
If the underlying driver supports this method, it will be called.
"""
self._transaction = False
try:
cancel = self._con.cancel
except AttributeError:
pass
else:
cancel()
def ping(self, *args, **kwargs):
"""Ping connection."""
return self._con.ping(*args, **kwargs)
def _cursor(self, *args, **kwargs):
"""A "tough" version of the method cursor()."""
# The args and kwargs are not part of the standard,
# but some database modules seem to use these.
transaction = self._transaction
if not transaction:
self._ping_check(2)
try:
# check whether the connection has been used too often
if (self._maxusage and self._usage >= self._maxusage
and not transaction):
raise self._failure
cursor = self._con.cursor(*args, **kwargs) # try to get a cursor
except self._failures as error: # error in getting cursor
try: # try to reopen the connection
con = self._create()
except Exception:
pass
else:
try: # and try one more time to get a cursor
cursor = con.cursor(*args, **kwargs)
except Exception:
pass
else:
self._close()
self._store(con)
if transaction:
raise error # re-raise the original error again
return cursor
try:
con.close()
except Exception:
pass
if transaction:
self._transaction = False
raise error # re-raise the original error again
return cursor
def cursor(self, *args, **kwargs):
"""Return a new Cursor Object using the connection."""
return SteadyDBCursor(self, *args, **kwargs)
def __del__(self):
"""Delete the steady connection."""
try:
self._close() # make sure the connection is closed
except: # builtin Exceptions might not exist any more
pass
class SteadyDBCursor:
"""A "tough" version of DB-API 2 cursors."""
def __init__(self, con, *args, **kwargs):
"""Create a "tough" DB-API 2 cursor."""
# basic initialization to make finalizer work
self._cursor = None
self._closed = True
# proper initialization of the cursor
self._con = con
self._args, self._kwargs = args, kwargs
self._clearsizes()
try:
self._cursor = con._cursor(*args, **kwargs)
except AttributeError:
raise TypeError(f"{con!r} is not a SteadyDBConnection.")
self._closed = False
def __enter__(self):
"""Enter the runtime context for the cursor object."""
return self
def __exit__(self, *exc):
"""Exit the runtime context for the cursor object."""
self.close()
def __iter__(self):
"""Make cursor compatible to the iteration protocol."""
cursor = self._cursor
try: # use iterator provided by original cursor
return iter(cursor)
except TypeError: # create iterator if not provided
return iter(cursor.fetchone, None)
def setinputsizes(self, sizes):
"""Store input sizes in case cursor needs to be reopened."""
self._inputsizes = sizes
def setoutputsize(self, size, column=None):
"""Store output sizes in case cursor needs to be reopened."""
self._outputsizes[column] = size
def _clearsizes(self):
"""Clear stored input and output sizes."""
self._inputsizes = []
self._outputsizes = {}
def _setsizes(self, cursor=None):
"""Set stored input and output sizes for cursor execution."""
if cursor is None:
cursor = self._cursor
if self._inputsizes:
cursor.setinputsizes(self._inputsizes)
for column, size in self._outputsizes.items():
if column is None:
cursor.setoutputsize(size)
else:
cursor.setoutputsize(size, column)
def close(self):
"""Close the tough cursor.
It will not complain if you close it more than once.
"""
if not self._closed:
try:
self._cursor.close()
except Exception:
pass
self._closed = True
def _get_tough_method(self, name):
"""Return a "tough" version of the given cursor method."""
def tough_method(*args, **kwargs):
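# Outline of the retry strategy implemented below: first call the
# method on the current cursor; if this fails with one of the
# failure exceptions, retry once on a fresh cursor of the same
# connection; if that fails as well, retry on a cursor of a newly
# created connection, re-raising the original error when the
# retried call fails in the same way again.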
execute = name.startswith('execute')
con = self._con
transaction = con._transaction
if not transaction:
con._ping_check(4)
try:
# check whether the connection has been used too often
if (con._maxusage and con._usage >= con._maxusage
and not transaction):
raise con._failure
if execute:
self._setsizes()
method = getattr(self._cursor, name)
result = method(*args, **kwargs) # try to execute
if execute:
self._clearsizes()
except con._failures as error: # execution error
if not transaction:
try:
cursor2 = con._cursor(
*self._args, **self._kwargs) # open new cursor
except Exception:
pass
else:
try: # and try one more time to execute
if execute:
self._setsizes(cursor2)
method = getattr(cursor2, name)
result = method(*args, **kwargs)
if execute:
self._clearsizes()
except Exception:
pass
else:
self.close()
self._cursor = cursor2
con._usage += 1
return result
try:
cursor2.close()
except Exception:
pass
try: # try to reopen the connection
con2 = con._create()
except Exception:
pass
else:
try:
cursor2 = con2.cursor(
*self._args, **self._kwargs) # open new cursor
except Exception:
pass
else:
if transaction:
self.close()
con._close()
con._store(con2)
self._cursor = cursor2
raise error # raise the original error again
error2 = None
try: # try one more time to execute
if execute:
self._setsizes(cursor2)
method2 = getattr(cursor2, name)
result = method2(*args, **kwargs)
if execute:
self._clearsizes()
except error.__class__: # same execution error
use2 = False
error2 = error
except Exception as error: # other execution errors
use2 = True
error2 = error
else:
use2 = True
if use2:
self.close()
con._close()
con._store(con2)
self._cursor = cursor2
con._usage += 1
if error2:
raise error2 # raise the other error
return result
try:
cursor2.close()
except Exception:
pass
try:
con2.close()
except Exception:
pass
if transaction:
self._transaction = False
raise error # re-raise the original error again
else:
con._usage += 1
return result
return tough_method
def __getattr__(self, name):
"""Inherit methods and attributes of underlying cursor."""
if self._cursor:
if name.startswith(('execute', 'call')):
# make execution methods "tough"
return self._get_tough_method(name)
return getattr(self._cursor, name)
raise InvalidCursor
def __del__(self):
"""Delete the steady cursor."""
try:
self.close() # make sure the cursor is closed
except: # builtin Exceptions might not exist any more
pass
DBUtils-Release-3_0_3/dbutils/steady_pg.py 0000664 0000000 0000000 00000025311 14422527151 0020547 0 ustar 00root root 0000000 0000000 """SteadyPg - hardened classic PyGreSQL connections.
Implements steady connections to a PostgreSQL database
using the classic (not DB-API 2 compliant) PyGreSQL API.
The connections are transparently reopened when they are
closed or the database connection has been lost or when
they are used more often than an optional usage limit.
Only connections which have been marked as being in a database
transaction with a begin() call will not be silently replaced.
A typical situation where database connections are lost
is when the database server or an intervening firewall is
shut down and restarted for maintenance reasons. In such a
case, all database connections would become unusable, even
though the database service may be already available again.
The "hardened" connections provided by this module will
make the database connections immediately available again.
This results in a steady PostgreSQL connection that can be used
by PooledPg or PersistentPg to create pooled or persistent
connections to a PostgreSQL database in a threaded environment
such as the application server of "Webware for Python."
Note, however, that the connections themselves are not thread-safe.
For more information on PostgreSQL, see:
https://www.postgresql.org/
For more information on PyGreSQL, see:
http://www.pygresql.org
For more information on Webware for Python, see:
https://webwareforpython.github.io/w4py/
Usage:
You can use the class SteadyPgConnection in the same way as you
would use the class DB from the classic PyGreSQL API module db.
The only difference is that you may specify a usage limit as the
first parameter when you open a connection (set it to None
if you prefer unlimited usage), and an optional list of commands
that may serve to prepare the session as the second parameter,
and you can specify whether it is allowed to close the connection
(by default this is true). When the connection to the PostgreSQL
database is lost or has been used too often, it will be automatically
reset, without further notice.
from dbutils.steady_pg import SteadyPgConnection
db = SteadyPgConnection(10000, ["set datestyle to german"],
host=..., dbname=..., user=..., ...)
...
result = db.query('...')
...
db.close()
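Since connections that are marked as being in a transaction with
begin() will not be silently replaced, you can protect critical
sections like this (a sketch, continuing the example above):

db.begin()
db.query('...')
db.end()  # or db.commit() / db.rollback()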
Ideas for improvement:
* Alternatively to the maximum number of uses,
implement a maximum time to live for connections.
* Optionally log usage and loss of connection.
Copyright, credits and license:
* Contributed as supplement for Webware for Python and PyGreSQL
by Christoph Zwerschke in September 2005
Licensed under the MIT license.
"""
from pg import DB as PgConnection
from . import __version__
class SteadyPgError(Exception):
"""General SteadyPg error."""
class InvalidConnection(SteadyPgError):
"""Database connection is invalid."""
class SteadyPgConnection:
"""Class representing steady connections to a PostgreSQL database.
Underlying the connection is a classic PyGreSQL pg API database
connection which is reset if the connection is lost or used too often.
Thus the resulting connection is steadier ("tough and self-healing").
If you want the connection to be persistent in a threaded environment,
then you should not deal with this class directly, but use either the
PooledPg module or the PersistentPg module to get the connections.
"""
version = __version__
def __init__(
self, maxusage=None, setsession=None, closeable=True,
*args, **kwargs):
"""Create a "tough" PostgreSQL connection.
maxusage: maximum usage limit for the underlying PyGreSQL connection
(number of uses, 0 or None means unlimited usage)
When this limit is reached, the connection is automatically reset.
setsession: optional list of SQL commands that may serve to prepare
the session, e.g. ["set datestyle to ...", "set time zone ..."]
closeable: if this is set to false, then closing the connection will
be silently ignored, but by default the connection can be closed
args, kwargs: the parameters that shall be used to establish
the PostgreSQL connections with PyGreSQL using pg.DB()
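For example (just a sketch, assuming a local database named
"testdb"), SteadyPgConnection(1000, dbname='testdb') creates a
hardened connection that is reset after 1000 uses.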
"""
# basic initialization to make finalizer work
self._con = None
self._closed = True
# proper initialization of the connection
if maxusage is None:
maxusage = 0
if not isinstance(maxusage, int):
raise TypeError("'maxusage' must be an integer value.")
self._maxusage = maxusage
self._setsession_sql = setsession
self._closeable = closeable
self._con = PgConnection(*args, **kwargs)
self._transaction = False
self._closed = False
self._setsession()
self._usage = 0
def __enter__(self):
"""Enter the runtime context. This will start a transaction."""
self.begin()
return self
def __exit__(self, *exc):
"""Exit the runtime context. This will end the transaction."""
if exc[0] is None and exc[1] is None and exc[2] is None:
self.commit()
else:
self.rollback()
def _setsession(self):
"""Execute the SQL commands for session preparation."""
if self._setsession_sql:
for sql in self._setsession_sql:
self._con.query(sql)
def _close(self):
"""Close the tough connection.
You can always close a tough connection with this method
and it will not complain if you close it more than once.
"""
if not self._closed:
try:
self._con.close()
except Exception:
pass
self._transaction = False
self._closed = True
def close(self):
"""Close the tough connection.
You are allowed to close a tough connection by default
and it will not complain if you close it more than once.
You can disallow closing connections by setting
the closeable parameter to something false. In this case,
closing tough connections will be silently ignored.
"""
if self._closeable:
self._close()
elif self._transaction:
self.reset()
def reopen(self):
"""Reopen the tough connection.
It will not complain if the connection cannot be reopened.
"""
try:
self._con.reopen()
except Exception:
if self._transaction:
self._transaction = False
try:
self._con.query('rollback')
except Exception:
pass
else:
self._transaction = False
self._closed = False
self._setsession()
self._usage = 0
def reset(self):
"""Reset the tough connection.
If a reset is not possible, tries to reopen the connection.
It will not complain if the connection is already closed.
"""
try:
self._con.reset()
self._transaction = False
self._setsession()
self._usage = 0
except Exception:
try:
self.reopen()
except Exception:
try:
self.rollback()
except Exception:
pass
def begin(self, sql=None):
"""Begin a transaction."""
self._transaction = True
try:
begin = self._con.begin
except AttributeError:
return self._con.query(sql or 'begin')
else:
# use existing method if available
return begin(sql=sql) if sql else begin()
def end(self, sql=None):
"""Commit the current transaction."""
self._transaction = False
try:
end = self._con.end
except AttributeError:
return self._con.query(sql or 'end')
else:
return end(sql=sql) if sql else end()
def commit(self, sql=None):
"""Commit the current transaction."""
self._transaction = False
try:
commit = self._con.commit
except AttributeError:
return self._con.query(sql or 'commit')
else:
return commit(sql=sql) if sql else commit()
def rollback(self, sql=None):
"""Rollback the current transaction."""
self._transaction = False
try:
rollback = self._con.rollback
except AttributeError:
return self._con.query(sql or 'rollback')
else:
return rollback(sql=sql) if sql else rollback()
def _get_tough_method(self, method):
"""Return a "tough" version of a connection class method.
The tough version checks whether the connection is bad (lost)
and automatically and transparently tries to reset the connection
if this is the case (for instance, the database has been restarted).
"""
def tough_method(*args, **kwargs):
transaction = self._transaction
if not transaction:
try:
# check whether connection status is bad
# or the connection has been used too often
if not self._con.db.status or (
self._maxusage and self._usage >= self._maxusage):
raise AttributeError
except Exception:
self.reset() # then reset the connection
try:
result = method(*args, **kwargs) # try connection method
except Exception: # error in query
if transaction: # inside a transaction
self._transaction = False
raise # propagate the error
if self._con.db.status: # if it was not a connection problem
raise # then propagate the error
self.reset() # reset the connection
result = method(*args, **kwargs) # and try one more time
self._usage += 1
return result
return tough_method
def __getattr__(self, name):
"""Inherit the members of the standard connection class.
Some methods are made "tougher" than in the standard version.
"""
if self._con:
attr = getattr(self._con, name)
if (name in ('query', 'get', 'insert', 'update', 'delete')
or name.startswith('get_')):
attr = self._get_tough_method(attr)
return attr
raise InvalidConnection
def __del__(self):
"""Delete the steady connection."""
try:
self._close() # make sure the connection is closed
except: # builtin Exceptions might not exist any more
pass
DBUtils-Release-3_0_3/docs/ 0000775 0000000 0000000 00000000000 14422527151 0015476 5 ustar 00root root 0000000 0000000 DBUtils-Release-3_0_3/docs/changelog.html 0000664 0000000 0000000 00000026401 14422527151 0020316 0 ustar 00root root 0000000 0000000
Changelog for DBUtils
3.0.3
DBUtils 3.0.3 was released on April 27, 2023.
Changes:
Support Python version 3.11.
Improve determination of the DB-API 2 module when a creator function is specified.
Minor fixes and a section on advanced usage in the docs.
3.0.2
DBUtils 3.0.2 was released on January 14, 2022.
The optional iterator protocol on cursors is now supported.
3.0.1
DBUtils 3.0.1 was released on December 22, 2021.
It adds InterfaceError to the default list of exceptions
for which the connection failover mechanism is applied.
You can override this with the failures parameter.
3.0.0
DBUtils 3.0.0 was released on November 26, 2021.
It is intended to be used with Python versions 3.6 to 3.10.
Changes:
Cease support for Python 2 and 3.5, minor optimizations.
2.0.3
DBUtils 2.0.3 was released on November 26, 2021.
Changes:
Support Python version 3.10.
2.0.2
DBUtils 2.0.2 was released on June 8, 2021.
Changes:
Allow using context managers for pooled connections.
2.0.1
DBUtils 2.0.1 was released on April 8, 2021.
Changes:
Avoid "name Exception is not defined" when exiting.
2.0
DBUtils 2.0 was released on September 26, 2020.
It is intended to be used with Python versions 2.7 and 3.5 to 3.9.
Changes:
DBUtils does not act as a Webware plugin anymore; it is now just an ordinary
Python package (of course, it could be used that way before as well).
The Webware Examples folder has been removed.
Folders, packages and modules have been renamed to lower-case.
Particularly, you need to import dbutils instead of DBUtils now.
The internal naming conventions have also been changed to comply with PEP8.
The documentation has been adapted to reflect the changes in this version.
This changelog has been compiled from the former release notes.
1.4
DBUtils 1.4 was released on September 26, 2020.
It is intended to be used with Python versions 2.7 and 3.5 to 3.9.
Improvements:
The SteadyDB and SteadyPg classes only reconnect after the
maxusage limit has been reached when the connection is not currently
inside a transaction.
1.3
DBUtils 1.3 was released on March 3, 2018.
It is intended to be used with Python versions 2.6, 2.7 and 3.4 to 3.7.
Improvements:
This version now supports context handlers for connections and cursors.
1.2
DBUtils 1.2 was released on February 5, 2017.
It is intended to be used with Python versions 2.6, 2.7 and 3.0 to 3.6.
1.1.1
DBUtils 1.1.1 was released on February 4, 2017.
It is intended to be used with Python versions 2.3 to 2.7.
Improvements:
Reopen SteadyDB connections when commit or rollback fails
(suggested by Ben Hoyt).
Bugfixes:
Fixed a problem when running under Jython (reported by Vitaly Kruglikov).
1.1
DBUtils 1.1 was released on August 14, 2011.
Improvements:
The transparent reopening of connections is actually an undesired behavior
if it happens during database transactions. In these cases, the transaction
should fail and the error be reported back to the application instead of the
rest of the transaction being executed in a new connection and therefore in
a new transaction. Therefore DBUtils now allows suspending the transparent
reopening during transactions. All you need to do is indicate the beginning
of a transaction by calling the begin() method of the connection.
DBUtils makes sure that this method always exists, even if the database
driver does not support it.
If the database driver supports a ping() method, then DBUtils can use it
to check whether connections are alive instead of just trying to use the
connection and reestablishing it in case it was dead. Since these checks are
done at the expense of some performance, you have exact control when these
are executed via the new ping parameter.
PooledDB has got another new parameter reset for controlling how
connections are reset before being put back into the pool.
Bugfixes:
Fixed propagation of error messages when the connection was lost.
Fixed an issue with the setoutputsize() cursor method.
Fixed some minor issues with the DBUtilsExample for Webware.
1.0
DBUtils 1.0 was released on November 29, 2008.
It is intended to be used with Python versions 2.2 to 2.6.
Changes:
Added a failures parameter for configuring the exception classes for
which the failover mechanism is applied (as suggested by Matthew Harriger).
Added a closeable parameter for configuring whether connections can be
closed (otherwise closing connections will be silently ignored).
It is now possible to override defaults via the creator.dbapi and
creator.threadsafety attributes.
Added an alias method dedicated_connection as a shorthand for
connection(shareable=False).
Added a version attribute to all exported classes.
Where the value 0 has the meaning "unlimited", parameters can now also be
set to the value None instead.
It turned out that threading.local does not work properly with
mod_wsgi, so we use the Python implementation for thread-local data
even when a faster threading.local implementation is available.
A new parameter threadlocal allows you to pass an arbitrary class
such as threading.local if you know it works in your environment.
Bugfixes and improvements:
In some cases, when instance initialization failed or referenced objects
were already destroyed, finalizers could throw exceptions or create infinite
recursion (problem reported by Gregory Pinero and Jehiah Czebotar).
DBUtils now tries harder to find the underlying DB-API 2 module if only a
connection creator function is specified. This had not worked before with
the MySQLdb module (problem reported by Gregory Pinero).
0.9.4
DBUtils 0.9.4 was released on July 7, 2007.
This release fixes a problem in the destructor code and has been supplemented
with a German User's Guide.
Again, please note that the dbapi parameter has been renamed to creator
in the last release, since you can now pass custom creator functions
for database connections instead of DB-API 2 modules.
0.9.3
DBUtils 0.9.3 was released on May 21, 2007.
Changes:
Support custom creator functions for database connections.
These can now be used as the first parameter instead of a DB-API 2 module
(suggested by Ezio Vernacotola).
Some fixes in the documentation.
Added Chinese translation of the User's Guide, kindly contributed by gashero.
0.9.2
DBUtils 0.9.2 was released on September 22, 2006.
It is intended to be used with Python versions 2.2 to 2.5.
Changes:
Renamed SolidDB to SteadyDB to avoid confusion with the "solidDB"
storage engine. Accordingly, renamed SolidPg to SteadyPg.
0.9.1
DBUtils 0.9.1 was released on May 8, 2006.
It is intended to be used with Python versions 2.2 to 2.4.
Changes:
Added _closeable attribute and made persistent connections not closeable
by default. This allows PersistentDB to be used in the same way as you
would use PooledDB.
Allowed arguments in the DB-API 2 cursor() method. MySQLdb is using this
to specify cursor classes. (Suggested by Michael Palmer.)
Improved the documentation and added a User's Guide.
0.8.1
DBUtils 0.8.1 was released on September 13, 2005.
It is intended to be used with Python versions 2.0 to 2.4.