WebwareForPython-DBUtils-ed2a1f2/.bumpversion.cfg

[bumpversion]
current_version = 3.1.0

[bumpversion:file:pyproject.toml]
search = version = "{current_version}"
replace = version = "{new_version}"

[bumpversion:file:dbutils/__init__.py]
search = __version__ = '{current_version}'
replace = __version__ = '{new_version}'

[bumpversion:file:README.md]
search = The current version {current_version}
replace = The current version {new_version}

[bumpversion:file:docs/main.rst]
search = :Version: {current_version}
replace = :Version: {new_version}

[bumpversion:file:docs/main.de.rst]
search = :Version: {current_version}
replace = :Version: {new_version}

WebwareForPython-DBUtils-ed2a1f2/.gitattributes

* text=auto eol=lf
*.bat text eol=crlf
*.config text eol=lf
*.css text eol=lf
*.html text eol=lf
*.js text eol=lf
*.prefs text
*.py text eol=lf
*.rst text eol=lf
*.sh text eol=lf
*.txt text eol=lf
*.po text eol=lf
*.pot text eol=lf
*.styl text eol=lf
*.xml text
*.gif binary
*.ico binary
*.jpg binary
*.lnk binary
*.mo binary
*.png binary
*.exe binary
*.so binary
*.ppt binary
*.pdf binary
*.gz binary
*.zip binary

WebwareForPython-DBUtils-ed2a1f2/.github/workflows/publish_on_pypi.yml

name: Publish DBUtils on PyPI

on:
  push:
    tags:
      - 'Release-*'

jobs:
  publish:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.11"
      - name: Install build tool
        run: python -m pip install build --user
      - name: Build source tarball and wheel
        run: python -m build
      - name: Publish distribution to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          user: __token__
          password: ${{ secrets.PYPI_TOKEN }}

WebwareForPython-DBUtils-ed2a1f2/.github/workflows/test_with_tox.yml

name: Test DBUtils using tox

on: [push, pull_request]

jobs:
  test:
    runs-on: ubuntu-latest

    strategy:
      matrix:
        python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']

    steps:
      - uses: actions/checkout@v4
      - name: Setup Python ${{ matrix.python }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python }}
      - run: pip install tox
      - run: tox -e py
      - if: matrix.python == 3.11
        run: TOXENV=ruff,manifest,docs,spell tox

WebwareForPython-DBUtils-ed2a1f2/.gitignore

*~
*.bak
*.default
*.egg-info
*.log
*.patch
*.pid
*.pstats
*.pyc
*.pyo
*.swp
build
dist
local
.idea
.tox
.pytest_cache
test.bat
MANIFEST
Thumbs.db

WebwareForPython-DBUtils-ed2a1f2/LICENSE

The MIT License (MIT)

Copyright (c) 2024 Christoph Zwerschke

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

WebwareForPython-DBUtils-ed2a1f2/MANIFEST.in

include MANIFEST.in
include LICENSE
include README.md
include .bumpversion.cfg
include pyproject.toml
include tox.ini
recursive-include tests *.py
recursive-include docs *.rst make.py *.html *.css *.png
prune docs/_build
global-exclude *.py[co] __pycache__

WebwareForPython-DBUtils-ed2a1f2/README.md

DBUtils
=======

DBUtils is a suite of tools providing solid, persistent and pooled connections to a database that can be used in all kinds of multi-threaded environments.

The suite supports DB-API 2 compliant database interfaces and the classic PyGreSQL interface.

The current version 3.1.0 of DBUtils supports Python versions 3.7 to 3.12.

**Please have a look at the [changelog](https://webwareforpython.github.io/DBUtils/changelog.html), because there were some breaking changes in version 2.0.**

The DBUtils home page can be found at https://webwareforpython.github.io/DBUtils/

WebwareForPython-DBUtils-ed2a1f2/dbutils/__init__.py

"""The DBUtils main package."""

__all__ = [
    '__version__',
    'simple_pooled_pg', 'steady_pg', 'pooled_pg', 'persistent_pg',
    'simple_pooled_db', 'steady_db', 'pooled_db', 'persistent_db']

__version__ = '3.1.0'

WebwareForPython-DBUtils-ed2a1f2/dbutils/persistent_db.py

"""PersistentDB - persistent DB-API 2 connections. Implements steady, thread-affine persistent connections to a database based on an arbitrary DB-API 2 compliant database interface module. This should result in a speedup for persistent applications such as the application server of "Webware for Python," without loss of robustness. Robustness is provided by using "hardened" SteadyDB connections. Even if the underlying database is restarted and all connections are lost, they will be automatically and transparently reopened. However, since you don't want this to happen in the middle of a database transaction, you must explicitly start transactions with the begin() method so that SteadyDB knows that the underlying connection shall not be replaced and errors passed on until the transaction is completed. Measures are taken to make the database connections thread-affine.
This means the same thread always uses the same cached connection, and no other thread will use it. So even if the underlying DB-API module is not thread-safe at the connection level this will be no problem here. For best performance, the application server should keep threads persistent. For this, you have to set MinServerThreads = MaxServerThreads in Webware. For the Python DB-API 2 specification, see: https://www.python.org/dev/peps/pep-0249/ For information on Webware for Python, see: https://webwareforpython.github.io/w4py/ Usage: First you need to set up a generator for your kind of database connections by creating an instance of PersistentDB, passing the following parameters: creator: either an arbitrary function returning new DB-API 2 connection objects or a DB-API 2 compliant database module maxusage: the maximum number of reuses of a single connection (the default of 0 or None means unlimited reuse) Whenever the limit is reached, the connection will be reset. setsession: an optional list of SQL commands that may serve to prepare the session, e.g. ["set datestyle to german", ...]. failures: an optional exception class or a tuple of exception classes for which the connection failover mechanism shall be applied, if the default (OperationalError, InterfaceError, InternalError) is not adequate for the used database module ping: an optional flag controlling when connections are checked with the ping() method if such a method is available (0 = None = never, 1 = default = whenever it is requested, 2 = when a cursor is created, 4 = when a query is executed, 7 = always, and all other bit combinations of these values) closeable: if this is set to true, then closing connections will be allowed, but by default this will be silently ignored threadlocal: an optional class for representing thread-local data that will be used instead of our Python implementation (threading.local is faster, but cannot be used in all cases) The creator function or the connect function of the DB-API 2 compliant database module specified as the creator will receive any additional parameters such as the host, database, user, password etc. You may choose some or all of these parameters in your own creator function, allowing for sophisticated failover and load-balancing mechanisms. For instance, if you are using pgdb as your DB-API 2 database module and want every connection to your local database 'mydb' to be reused 1000 times: import pgdb # import used DB-API 2 module from dbutils.persistent_db import PersistentDB persist = PersistentDB(pgdb, 1000, database='mydb') Once you have set up the generator with these parameters, you can request database connections of that kind: db = persist.connection() You can use these connections just as if they were ordinary DB-API 2 connections. Actually what you get is the hardened SteadyDB version of the underlying DB-API 2 connection. Closing a persistent connection with db.close() will be silently ignored since it would be reopened at the next usage anyway and contrary to the intent of having persistent connections. Instead, the connection will be automatically closed when the thread dies. You can change this behavior by setting the closeable parameter. Note that you need to explicitly start transactions by calling the begin() method. This ensures that the transparent reopening will be suspended until the end of the transaction, and that the connection will be rolled back before being reused by the same thread. 
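For example, a transaction with a persistent connection could be handled like this (a minimal sketch, assuming the 'mydb' database from above and a hypothetical 'customers' table):

import pgdb  # import used DB-API 2 module
from dbutils.persistent_db import PersistentDB

persist = PersistentDB(pgdb, 1000, database='mydb')
db = persist.connection()
db.begin()  # suspend transparent reopening until the transaction ends
try:
    cur = db.cursor()
    cur.execute("update customers set name = %(name)s where id = %(id)s",
                {'name': 'John Doe', 'id': 1})
    cur.close()
except Exception:
    db.rollback()  # undo the changes and end the transaction
    raise
else:
    db.commit()  # end the transaction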
By setting the threadlocal parameter to threading.local, getting connections may become a bit faster, but this may not work in all environments (for instance, mod_wsgi is known to cause problems since it clears the threading.local data between requests). Ideas for improvement: * Add a thread for monitoring, restarting (or closing) bad or expired connections (similar to DBConnectionPool/ResourcePool by Warren Smith). * Optionally log usage, bad connections and exceeding of limits. Copyright, credits and license: * Contributed as supplement for Webware for Python and PyGreSQL by Christoph Zwerschke in September 2005 * Based on an idea presented on the Webware developer mailing list by Geoffrey Talvola in July 2005 Licensed under the MIT license. """ from . import __version__ from .steady_db import connect try: # Prefer the pure Python version of threading.local. # The C implementation turned out to be problematic with mod_wsgi, # since it does not keep the thread-local data between requests. from _threading_local import local except ImportError: # Fall back to the default version of threading.local. from threading import local class PersistentDBError(Exception): """General PersistentDB error.""" class NotSupportedError(PersistentDBError): """DB-API module not supported by PersistentDB.""" class PersistentDB: """Generator for persistent DB-API 2 connections. After you have created the connection pool, you can use connection() to get thread-affine, steady DB-API 2 connections. """ version = __version__ def __init__( self, creator, maxusage=None, setsession=None, failures=None, ping=1, closeable=False, threadlocal=None, *args, **kwargs): """Set up the persistent DB-API 2 connection generator. creator: either an arbitrary function returning new DB-API 2 connection objects or a DB-API 2 compliant database module maxusage: maximum number of reuses of a single connection (number of database operations, 0 or None means unlimited) Whenever the limit is reached, the connection will be reset. setsession: optional list of SQL commands that may serve to prepare the session, e.g. 
["set datestyle to ...", "set time zone ..."] failures: an optional exception class or a tuple of exception classes for which the connection failover mechanism shall be applied, if the default (OperationalError, InterfaceError, InternalError) is not adequate for the used database module ping: determines when the connection should be checked with ping() (0 = None = never, 1 = default = whenever it is requested, 2 = when a cursor is created, 4 = when a query is executed, 7 = always, and all other bit combinations of these values) closeable: if this is set to true, then closing connections will be allowed, but by default this will be silently ignored threadlocal: an optional class for representing thread-local data that will be used instead of our Python implementation (threading.local is faster, but cannot be used in all cases) args, kwargs: the parameters that shall be passed to the creator function or the connection constructor of the DB-API 2 module """ try: threadsafety = creator.threadsafety except AttributeError: try: threadsafety = creator.dbapi.threadsafety except AttributeError: try: if not callable(creator.connect): raise AttributeError except AttributeError: threadsafety = 1 else: threadsafety = 0 if not threadsafety: raise NotSupportedError("Database module is not thread-safe.") self._creator = creator self._maxusage = maxusage self._setsession = setsession self._failures = failures self._ping = ping self._closeable = closeable self._args, self._kwargs = args, kwargs self.thread = (threadlocal or local)() def steady_connection(self): """Get a steady, non-persistent DB-API 2 connection.""" return connect( self._creator, self._maxusage, self._setsession, self._failures, self._ping, self._closeable, *self._args, **self._kwargs) def connection(self, shareable=False): # noqa: ARG002 """Get a steady, persistent DB-API 2 connection. The shareable parameter exists only for compatibility with the PooledDB connection method. In reality, persistent connections are of course never shared with other threads. """ try: con = self.thread.connection except AttributeError as error: con = self.steady_connection() if not con.threadsafety(): raise NotSupportedError( "Database module is not thread-safe.") from error self.thread.connection = con con._ping_check() return con def dedicated_connection(self): """Alias for connection(shareable=False).""" return self.connection() WebwareForPython-DBUtils-ed2a1f2/dbutils/persistent_pg.py000066400000000000000000000155721457556542700237150ustar00rootroot00000000000000"""PersistentPg - persistent classic PyGreSQL connections. Implements steady, thread-affine persistent connections to a PostgreSQL database using the classic (not DB-API 2 compliant) PyGreSQL API. This should result in a speedup for persistent applications such as the application server of "Webware for Python," without loss of robustness. Robustness is provided by using "hardened" SteadyPg connections. Even if the underlying database is restarted and all connections are lost, they will be automatically and transparently reopened. However, since you don't want this to happen in the middle of a database transaction, you must explicitly start transactions with the begin() method so that SteadyPg knows that the underlying connection shall not be replaced and errors passed on until the transaction is completed. Measures are taken to make the database connections thread-affine. This means the same thread always uses the same cached connection, and no other thread will use it. 
So the fact that the classic PyGreSQL pg module is not thread-safe at the connection level is no problem here. For best performance, the application server should keep threads persistent. For this, you have to set MinServerThreads = MaxServerThreads in Webware. For more information on PostgreSQL, see: https://www.postgresql.org/ For more information on PyGreSQL, see: http://www.pygresql.org For more information on Webware for Python, see: https://webwareforpython.github.io/w4py/ Usage: First you need to set up a generator for your kind of database connections by creating an instance of PersistentPg, passing the following parameters: maxusage: the maximum number of reuses of a single connection (the default of 0 or None means unlimited reuse) When this maximum usage number of the connection is reached, the connection is automatically reset (closed and reopened). setsession: an optional list of SQL commands that may serve to prepare the session, e.g. ["set datestyle to german", ...] closeable: if this is set to true, then closing connections will be allowed, but by default this will be silently ignored threadlocal: an optional class for representing thread-local data that will be used instead of our Python implementation (threading.local is faster, but cannot be used in all cases) Additionally, you have to pass the parameters for the actual PostgreSQL connection which are passed via PyGreSQL, such as the names of the host, database, user, password etc. For instance, if you want every connection to your local database 'mydb' to be reused 1000 times: from dbutils.persistent_pg import PersistentPg persist = PersistentPg(1000, dbname='mydb') Once you have set up the generator with these parameters, you can request database connections of that kind: db = persist.connection() You can use these connections just as if they were ordinary classic PyGreSQL API connections. Actually what you get is the hardened SteadyPg version of a classic PyGreSQL connection. Closing a persistent connection with db.close() will be silently ignored since it would be reopened at the next usage anyway and contrary to the intent of having persistent connections. Instead, the connection will be automatically closed when the thread dies. You can change this behavior by setting the closeable parameter. Note that you need to explicitly start transactions by calling the begin() method. This ensures that the transparent reopening will be suspended until the end of the transaction, and that the connection will be rolled back before being reused in the same thread. To end transactions, use one of the end(), commit() or rollback() methods. By setting the threadlocal parameter to threading.local, getting connections may become a bit faster, but this may not work in all environments (for instance, mod_wsgi is known to cause problems since it clears the threading.local data between requests). Ideas for improvement: * Add a thread for monitoring, restarting (or closing) bad or expired connections (similar to DBConnectionPool/ResourcePool by Warren Smith). * Optionally log usage, bad connections and exceeding of limits. Copyright, credits and license: * Contributed as supplement for Webware for Python and PyGreSQL by Christoph Zwerschke in September 2005 * Based on an idea presented on the Webware developer mailing list by Geoffrey Talvola in July 2005 Licensed under the MIT license. """ from . import __version__ from .steady_pg import SteadyPgConnection try: # Prefer the pure Python version of threading.local.
# The C implementation turned out to be problematic with mod_wsgi, # since it does not keep the thread-local data between requests. from _threading_local import local except ImportError: # Fall back to the default version of threading.local. from threading import local class PersistentPg: """Generator for persistent classic PyGreSQL connections. After you have created the connection pool, you can use connection() to get thread-affine, steady PostgreSQL connections. """ version = __version__ def __init__( self, maxusage=None, setsession=None, closeable=False, threadlocal=None, *args, **kwargs): """Set up the persistent PostgreSQL connection generator. maxusage: maximum number of reuses of a single connection (0 or None means unlimited reuse) When this maximum usage number of the connection is reached, the connection is automatically reset (closed and reopened). setsession: optional list of SQL commands that may serve to prepare the session, e.g. ["set datestyle to ...", "set time zone ..."] closeable: if this is set to true, then closing connections will be allowed, but by default this will be silently ignored threadlocal: an optional class for representing thread-local data that will be used instead of our Python implementation (threading.local is faster, but cannot be used in all cases) args, kwargs: the parameters that shall be used to establish the PostgreSQL connections using the PyGreSQL class pg.DB() """ self._maxusage = maxusage self._setsession = setsession self._closeable = closeable self._args, self._kwargs = args, kwargs self.thread = (threadlocal or local)() def steady_connection(self): """Get a steady, non-persistent PyGreSQL connection.""" return SteadyPgConnection( self._maxusage, self._setsession, self._closeable, *self._args, **self._kwargs) def connection(self): """Get a steady, persistent PyGreSQL connection.""" try: con = self.thread.connection except AttributeError: con = self.steady_connection() self.thread.connection = con return con

WebwareForPython-DBUtils-ed2a1f2/dbutils/pooled_db.py

"""PooledDB - pooling for DB-API 2 connections. Implements a pool of steady, thread-safe cached connections to a database which are transparently reused, using an arbitrary DB-API 2 compliant database interface module. This should result in a speedup for persistent applications such as the application server of "Webware for Python," without loss of robustness. Robustness is provided by using "hardened" SteadyDB connections. Even if the underlying database is restarted and all connections are lost, they will be automatically and transparently reopened. However, since you don't want this to happen in the middle of a database transaction, you must explicitly start transactions with the begin() method so that SteadyDB knows that the underlying connection shall not be replaced and errors passed on until the transaction is completed. Measures are taken to make the pool of connections thread-safe. If the underlying DB-API module is thread-safe at the connection level, the requested connections may be shared with other threads by default, but you can also request dedicated connections in case you need them.
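For instance, the two kinds of connections can be requested like this (a preview sketch of the usage explained below, assuming a DB-API 2 module with connection-level thread safety; with pgdb, which is only thread-safe at the module level, shared connections are not available and the pool silently hands out dedicated connections instead):

import pgdb  # or any other DB-API 2 module
from dbutils.pooled_db import PooledDB

pool = PooledDB(pgdb, mincached=5, maxshared=10, database='mydb')
db1 = pool.connection()  # may be shared with other threads
db2 = pool.connection(shareable=False)  # always a dedicated connection
...
db1.close()  # return the connections to the pool
db2.close()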
For the Python DB-API 2 specification, see: https://www.python.org/dev/peps/pep-0249/ For information on Webware for Python, see: https://webwareforpython.github.io/w4py/ Usage: First you need to set up the database connection pool by creating an instance of PooledDB, passing the following parameters: creator: either an arbitrary function returning new DB-API 2 connection objects or a DB-API 2 compliant database module mincached: the initial number of idle connections in the pool (the default of 0 means no connections are made at startup) maxcached: the maximum number of idle connections in the pool (the default value of 0 or None means unlimited pool size) maxshared: maximum number of shared connections allowed (the default value of 0 or None means all connections are dedicated) When this maximum number is reached, connections are shared if they have been requested as shareable. maxconnections: maximum number of connections generally allowed (the default value of 0 or None means any number of connections) blocking: determines behavior when exceeding the maximum (if this is set to true, block and wait until the number of connections decreases, but by default an error will be reported) maxusage: maximum number of reuses of a single connection (the default of 0 or None means unlimited reuse) When this maximum usage number of the connection is reached, the connection is automatically reset (closed and reopened). setsession: an optional list of SQL commands that may serve to prepare the session, e.g. ["set datestyle to german", ...] reset: how connections should be reset when returned to the pool (False or None to rollback transactions started with begin(), the default value True always issues a rollback for safety's sake) failures: an optional exception class or a tuple of exception classes for which the connection failover mechanism shall be applied, if the default (OperationalError, InterfaceError, InternalError) is not adequate for the used database module ping: an optional flag controlling when connections are checked with the ping() method if such a method is available (0 = None = never, 1 = default = whenever fetched from the pool, 2 = when a cursor is created, 4 = when a query is executed, 7 = always, and all other bit combinations of these values) The creator function or the connect function of the DB-API 2 compliant database module specified as the creator will receive any additional parameters such as the host, database, user, password etc. You may choose some or all of these parameters in your own creator function, allowing for sophisticated failover and load-balancing mechanisms. For instance, if you are using pgdb as your DB-API 2 database module and want a pool of at least five connections to your local database 'mydb': import pgdb # import used DB-API 2 module from dbutils.pooled_db import PooledDB pool = PooledDB(pgdb, 5, database='mydb') Once you have set up the connection pool you can request database connections from that pool: db = pool.connection() You can use these connections just as if they were ordinary DB-API 2 connections. Actually what you get is the hardened SteadyDB version of the underlying DB-API 2 connection. Please note that the connection may be shared with other threads by default if you set a non-zero maxshared parameter and the DB-API 2 module allows this. 
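To illustrate the pool in a threaded setting, here is a minimal sketch, assuming the pool created above and a number of hypothetical worker threads:

import threading

def worker():
    db = pool.connection()  # get a connection from the pool
    try:
        cur = db.cursor()
        cur.execute('select 1')  # any database operation
        cur.fetchone()
        cur.close()
    finally:
        db.close()  # give the connection back to the pool

threads = [threading.Thread(target=worker) for _ in range(10)]
for thread in threads:
    thread.start()
for thread in threads:
    thread.join()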
If you want to have a dedicated connection, use: db = pool.connection(shareable=False) You can also use this to get a dedicated connection: db = pool.dedicated_connection() If you don't need it anymore, you should immediately return it to the pool with db.close(). You can get another connection in the same way. Warning: In a threaded environment, never do the following: pool.connection().cursor().execute(...) This would release the connection too early for reuse which may be fatal if the connections are not thread-safe. Make sure that the connection object stays alive as long as you are using it, like this: db = pool.connection() cur = db.cursor() cur.execute(...) res = cur.fetchone() cur.close() # or del cur db.close() # or del db You can also use context managers for simpler code: with pool.connection() as db: with db.cursor() as cur: cur.execute(...) res = cur.fetchone() Note that you need to explicitly start transactions by calling the begin() method. This ensures that the connection will not be shared with other threads, that the transparent reopening will be suspended until the end of the transaction, and that the connection will be rolled back before being given back to the connection pool. Ideas for improvement: * Add a thread for monitoring, restarting (or closing) bad or expired connections (similar to DBConnectionPool/ResourcePool by Warren Smith). * Optionally log usage, bad connections and exceeding of limits. Copyright, credits and license: * Contributed as supplement for Webware for Python and PyGreSQL by Christoph Zwerschke in September 2005 * Based on the code of DBPool, contributed to Webware for Python by Dan Green in December 2000 Licensed under the MIT license. """ from contextlib import suppress from functools import total_ordering from threading import Condition from . import __version__ from .steady_db import connect class PooledDBError(Exception): """General PooledDB error.""" class InvalidConnectionError(PooledDBError): """Database connection is invalid.""" class NotSupportedError(PooledDBError): """DB-API module not supported by PooledDB.""" class TooManyConnectionsError(PooledDBError): """Too many database connections were opened.""" # deprecated alias names for error classes InvalidConnection = InvalidConnectionError TooManyConnections = TooManyConnectionsError class PooledDB: """Pool for DB-API 2 connections. After you have created the connection pool, you can use connection() to get pooled, steady DB-API 2 connections. """ version = __version__ def __init__( self, creator, mincached=0, maxcached=0, maxshared=0, maxconnections=0, blocking=False, maxusage=None, setsession=None, reset=True, failures=None, ping=1, *args, **kwargs): """Set up the DB-API 2 connection pool. creator: either an arbitrary function returning new DB-API 2 connection objects or a DB-API 2 compliant database module mincached: initial number of idle connections in the pool (0 means no connections are made at startup) maxcached: maximum number of idle connections in the pool (0 or None means unlimited pool size) maxshared: maximum number of shared connections (0 or None means all connections are dedicated) When this maximum number is reached, connections are shared if they have been requested as shareable.
maxconnections: maximum number of connections generally allowed (0 or None means an arbitrary number of connections) blocking: determines behavior when exceeding the maximum (if this is set to true, block and wait until the number of connections decreases, otherwise an error will be reported) maxusage: maximum number of reuses of a single connection (0 or None means unlimited reuse) When this maximum usage number of the connection is reached, the connection is automatically reset (closed and reopened). setsession: optional list of SQL commands that may serve to prepare the session, e.g. ["set datestyle to ...", "set time zone ..."] reset: how connections should be reset when returned to the pool (False or None to rollback transactions started with begin(), True to always issue a rollback for safety's sake) failures: an optional exception class or a tuple of exception classes for which the connection failover mechanism shall be applied, if the default (OperationalError, InterfaceError, InternalError) is not adequate for the used database module ping: determines when the connection should be checked with ping() (0 = None = never, 1 = default = whenever fetched from the pool, 2 = when a cursor is created, 4 = when a query is executed, 7 = always, and all other bit combinations of these values) args, kwargs: the parameters that shall be passed to the creator function or the connection constructor of the DB-API 2 module """ try: threadsafety = creator.threadsafety except AttributeError: try: threadsafety = creator.dbapi.threadsafety except AttributeError: try: if not callable(creator.connect): raise AttributeError except AttributeError: threadsafety = 1 else: threadsafety = 0 if not threadsafety: raise NotSupportedError("Database module is not thread-safe.") self._creator = creator self._args, self._kwargs = args, kwargs self._blocking = blocking self._maxusage = maxusage self._setsession = setsession self._reset = reset self._failures = failures self._ping = ping if mincached is None: mincached = 0 if maxcached is None: maxcached = 0 if maxconnections is None: maxconnections = 0 if maxcached: if maxcached < mincached: maxcached = mincached self._maxcached = maxcached else: self._maxcached = 0 if threadsafety > 1 and maxshared: self._maxshared = maxshared self._shared_cache = [] # the cache for shared connections else: self._maxshared = 0 if maxconnections: if maxconnections < maxcached: maxconnections = maxcached if maxconnections < maxshared: maxconnections = maxshared self._maxconnections = maxconnections else: self._maxconnections = 0 self._idle_cache = [] # the actual pool of idle connections self._lock = Condition() self._connections = 0 # Establish an initial number of idle database connections: idle = [self.dedicated_connection() for i in range(mincached)] while idle: idle.pop().close() def steady_connection(self): """Get a steady, unpooled DB-API 2 connection.""" return connect( self._creator, self._maxusage, self._setsession, self._failures, self._ping, True, *self._args, **self._kwargs) def connection(self, shareable=True): """Get a steady, cached DB-API 2 connection from the pool. If shareable is set and the underlying DB-API 2 allows it, then the connection may be shared with other threads. 
""" if shareable and self._maxshared: with self._lock: while (not self._shared_cache and self._maxconnections and self._connections >= self._maxconnections): self._wait_lock() if len(self._shared_cache) < self._maxshared: # shared cache is not full, get a dedicated connection try: # first try to get it from the idle cache con = self._idle_cache.pop(0) except IndexError: # else get a fresh connection con = self.steady_connection() else: con._ping_check() # check this connection con = SharedDBConnection(con) self._connections += 1 else: # shared cache full or no more connections allowed self._shared_cache.sort() # least shared connection first con = self._shared_cache.pop(0) # get it while con.con._transaction: # do not share connections which are in a transaction self._shared_cache.insert(0, con) self._wait_lock() self._shared_cache.sort() con = self._shared_cache.pop(0) con.con._ping_check() # check the underlying connection con.share() # increase share of this connection # put the connection (back) into the shared cache self._shared_cache.append(con) self._lock.notify() con = PooledSharedDBConnection(self, con) else: # try to get a dedicated connection with self._lock: while (self._maxconnections and self._connections >= self._maxconnections): self._wait_lock() # connection limit not reached, get a dedicated connection try: # first try to get it from the idle cache con = self._idle_cache.pop(0) except IndexError: # else get a fresh connection con = self.steady_connection() else: con._ping_check() # check connection con = PooledDedicatedDBConnection(self, con) self._connections += 1 return con def dedicated_connection(self): """Alias for connection(shareable=False).""" return self.connection(False) def unshare(self, con): """Decrease the share of a connection in the shared cache.""" with self._lock: con.unshare() shared = con.shared if not shared: # connection is idle # try to remove it from shared cache with suppress(ValueError): # if pool has already been closed self._shared_cache.remove(con) if not shared: # connection has become idle, self.cache(con.con) # so add it to the idle cache def cache(self, con): """Put a dedicated connection back into the idle cache.""" with self._lock: if not self._maxcached or len(self._idle_cache) < self._maxcached: con._reset(force=self._reset) # rollback possible transaction # the idle cache is not full, so put it there self._idle_cache.append(con) # append it to the idle cache else: # if the idle cache is already full, con.close() # then close the connection self._connections -= 1 self._lock.notify() def close(self): """Close all connections in the pool.""" with self._lock: while self._idle_cache: # close all idle connections con = self._idle_cache.pop(0) with suppress(Exception): con.close() if self._maxshared: # close all shared connections while self._shared_cache: con = self._shared_cache.pop(0).con with suppress(Exception): con.close() self._connections -= 1 self._lock.notify_all() def __del__(self): """Delete the pool.""" # builtins (including Exceptions) might not exist anymore try: # noqa: SIM105 self.close() except: # noqa: E722, S110 pass def _wait_lock(self): """Wait until notified or report an error.""" if not self._blocking: raise TooManyConnectionsError self._lock.wait() # Auxiliary classes for pooled connections class PooledDedicatedDBConnection: """Auxiliary proxy class for pooled dedicated connections.""" def __init__(self, pool, con): """Create a pooled dedicated connection. 
pool: the corresponding PooledDB instance con: the underlying SteadyDB connection """ # basic initialization to make finalizer work self._con = None # proper initialization of the connection if not con.threadsafety(): raise NotSupportedError("Database module is not thread-safe.") self._pool = pool self._con = con def close(self): """Close the pooled dedicated connection.""" # Instead of actually closing the connection, # return it to the pool for future reuse. if self._con: self._pool.cache(self._con) self._con = None def __getattr__(self, name): """Proxy all members of the class.""" if self._con: return getattr(self._con, name) raise InvalidConnectionError def __del__(self): """Delete the pooled connection.""" # builtins (including Exceptions) might not exist anymore try: # noqa: SIM105 self.close() except: # noqa: E722, S110 pass def __enter__(self): """Enter a runtime context for the connection.""" return self def __exit__(self, *exc): """Exit a runtime context for the connection.""" self.close() @total_ordering class SharedDBConnection: """Auxiliary class for shared connections.""" def __init__(self, con): """Create a shared connection. con: the underlying SteadyDB connection """ self.con = con self.shared = 1 def __lt__(self, other): """Check whether this connection should come before the other one.""" if self.con._transaction == other.con._transaction: return self.shared < other.shared return not self.con._transaction def __eq__(self, other): """Check whether this connection is the same as the other one.""" return (self.con._transaction == other.con._transaction and self.shared == other.shared) def share(self): """Increase the share of this connection.""" self.shared += 1 def unshare(self): """Decrease the share of this connection.""" self.shared -= 1 class PooledSharedDBConnection: """Auxiliary proxy class for pooled shared connections.""" def __init__(self, pool, shared_con): """Create a pooled shared connection. pool: the corresponding PooledDB instance con: the underlying SharedDBConnection """ # basic initialization to make finalizer work self._con = None # proper initialization of the connection con = shared_con.con if not con.threadsafety() > 1: raise NotSupportedError("Database connection is not thread-safe.") self._pool = pool self._shared_con = shared_con self._con = con def close(self): """Close the pooled shared connection.""" # Instead of actually closing the connection, # unshare it and/or return it to the pool. if self._con: self._pool.unshare(self._shared_con) self._shared_con = self._con = None def __getattr__(self, name): """Proxy all members of the class.""" if self._con: return getattr(self._con, name) raise InvalidConnectionError def __del__(self): """Delete the pooled connection.""" # builtins (including Exceptions) might not exist anymore try: # noqa: SIM105 self.close() except: # noqa: E722, S110 pass def __enter__(self): """Enter a runtime context for the connection.""" return self def __exit__(self, *exc): """Exit a runtime context for the connection.""" self.close()

WebwareForPython-DBUtils-ed2a1f2/dbutils/pooled_pg.py

"""PooledPg - pooling for classic PyGreSQL connections. Implements a pool of steady, thread-safe cached connections to a PostgreSQL database which are transparently reused, using the classic (not DB-API 2 compliant) PyGreSQL API.
This should result in a speedup for persistent applications such as the application server of "Webware for Python," without loss of robustness. Robustness is provided by using "hardened" SteadyPg connections. Even if the underlying database is restarted and all connections are lost, they will be automatically and transparently reopened. However, since you don't want this to happen in the middle of a database transaction, you must explicitly start transactions with the begin() method so that SteadyPg knows that the underlying connection shall not be replaced and errors passed on until the transaction is completed. Measures are taken to make the pool of connections thread-safe regardless of the fact that the classic PyGreSQL pg module itself is not thread-safe at the connection level. For more information on PostgreSQL, see: https://www.postgresql.org/ For more information on PyGreSQL, see: http://www.pygresql.org For more information on Webware for Python, see: https://webwareforpython.github.io/w4py/ Usage: First you need to set up the database connection pool by creating an instance of PooledPg, passing the following parameters: mincached: the initial number of connections in the pool (the default of 0 means no connections are made at startup) maxcached: the maximum number of connections in the pool (the default value of 0 or None means unlimited pool size) maxconnections: maximum number of connections generally allowed (the default value of 0 or None means any number of connections) blocking: determines behavior when exceeding the maximum (if this is set to true, block and wait until the number of connections decreases, but by default an error will be reported) maxusage: maximum number of reuses of a single connection (the default of 0 or None means unlimited reuse) When this maximum usage number of the connection is reached, the connection is automatically reset (closed and reopened). setsession: an optional list of SQL commands that may serve to prepare the session, e.g. ["set datestyle to german", ...] Additionally, you have to pass the parameters for the actual PostgreSQL connection which are passed via PyGreSQL, such as the names of the host, database, user, password etc. For instance, if you want a pool of at least five connections to your local database 'mydb': from dbutils.pooled_pg import PooledPg pool = PooledPg(5, dbname='mydb') Once you have set up the connection pool you can request database connections from that pool: db = pool.connection() You can use these connections just as if they were ordinary classic PyGreSQL API connections. Actually what you get is a proxy class for the hardened SteadyPg version of the connection. The connection will not be shared with other threads. If you don't need it anymore, you should immediately return it to the pool with db.close(). You can get another connection in the same way or with db.reopen(). Warning: In a threaded environment, never do the following: res = pool.connection().query(...).getresult() This would release the connection too early for reuse which may be fatal because the connections are not thread-safe. Make sure that the connection object stays alive as long as you are using it, like this: db = pool.connection() res = db.query(...).getresult() db.close() # or del db You can also use a context manager for simpler code: with pool.connection() as db: res = db.query(...).getresult() Note that you need to explicitly start transactions by calling the begin() method.
This ensures that the transparent reopening will be suspended until the end of the transaction, and that the connection will be rolled back before being given back to the connection pool. To end transactions, use one of the end(), commit() or rollback() methods. Ideas for improvement: * Add a thread for monitoring, restarting (or closing) bad or expired connections (similar to DBConnectionPool/ResourcePool by Warren Smith). * Optionally log usage, bad connections and exceeding of limits. Copyright, credits and license: * Contributed as supplement for Webware for Python and PyGreSQL by Christoph Zwerschke in September 2005 * Based on the code of DBPool, contributed to Webware for Python by Dan Green in December 2000 Licensed under the MIT license. """ from contextlib import suppress from queue import Empty, Full, Queue from . import __version__ from .steady_pg import SteadyPgConnection # constants for "reset" parameter RESET_ALWAYS_ROLLBACK = 1 RESET_COMPLETELY = 2 class PooledPgError(Exception): """General PooledPg error.""" class InvalidConnectionError(PooledPgError): """Database connection is invalid.""" class TooManyConnectionsError(PooledPgError): """Too many database connections were opened.""" # deprecated alias names for error classes InvalidConnection = InvalidConnectionError TooManyConnections = TooManyConnectionsError class PooledPg: """Pool for classic PyGreSQL connections. After you have created the connection pool, you can use connection() to get pooled, steady PostgreSQL connections. """ version = __version__ def __init__( self, mincached=0, maxcached=0, maxconnections=0, blocking=False, maxusage=None, setsession=None, reset=None, *args, **kwargs): """Set up the PostgreSQL connection pool. mincached: initial number of connections in the pool (0 means no connections are made at startup) maxcached: maximum number of connections in the pool (0 or None means unlimited pool size) maxconnections: maximum number of connections generally allowed (0 or None means an arbitrary number of connections) blocking: determines behavior when exceeding the maximum (if this is set to true, block and wait until the number of connections decreases, otherwise an error will be reported) maxusage: maximum number of reuses of a single connection (0 or None means unlimited reuse) When this maximum usage number of the connection is reached, the connection is automatically reset (closed and reopened). setsession: optional list of SQL commands that may serve to prepare the session, e.g. 
["set datestyle to ...", "set time zone ..."] reset: how connections should be reset when returned to the pool (0 or None to rollback transactions started with begin(), 1 to always issue a rollback, 2 for a complete reset) args, kwargs: the parameters that shall be used to establish the PostgreSQL connections using class PyGreSQL pg.DB() """ self._args, self._kwargs = args, kwargs self._maxusage = maxusage self._setsession = setsession self._reset = reset or 0 if mincached is None: mincached = 0 if maxcached is None: maxcached = 0 if maxconnections is None: maxconnections = 0 if maxcached and maxcached < mincached: maxcached = mincached if maxconnections: if maxconnections < maxcached: maxconnections = maxcached # Create semaphore for number of allowed connections generally: from threading import Semaphore self._connections = Semaphore(maxconnections) self._blocking = blocking else: self._connections = None self._cache = Queue(maxcached) # the actual connection pool # Establish an initial number of database connections: idle = [self.connection() for i in range(mincached)] while idle: idle.pop().close() def steady_connection(self): """Get a steady, unpooled PostgreSQL connection.""" return SteadyPgConnection(self._maxusage, self._setsession, True, *self._args, **self._kwargs) def connection(self): """Get a steady, cached PostgreSQL connection from the pool.""" if self._connections and not self._connections.acquire(self._blocking): raise TooManyConnectionsError try: con = self._cache.get_nowait() except Empty: con = self.steady_connection() return PooledPgConnection(self, con) def cache(self, con): """Put a connection back into the pool cache.""" try: if self._reset == RESET_COMPLETELY: con.reset() # reset the connection completely elif self._reset == RESET_ALWAYS_ROLLBACK or con._transaction: with suppress(Exception): con.rollback() # rollback a possible transaction self._cache.put_nowait(con) # and then put it back into the cache except Full: con.close() if self._connections: self._connections.release() def close(self): """Close all connections in the pool.""" while 1: try: con = self._cache.get_nowait() with suppress(Exception): con.close() if self._connections: self._connections.release() except Empty: break def __del__(self): """Delete the pool.""" # builtins (including Exceptions) might not exist anymore try: # noqa: SIM105 self.close() except: # noqa: E722, S110 pass # Auxiliary class for pooled connections class PooledPgConnection: """Proxy class for pooled PostgreSQL connections.""" def __init__(self, pool, con): """Create a pooled DB-API 2 connection. pool: the corresponding PooledPg instance con: the underlying SteadyPg connection """ self._pool = pool self._con = con def close(self): """Close the pooled connection.""" # Instead of actually closing the connection, # return it to the pool so that it can be reused. if self._con: self._pool.cache(self._con) self._con = None def reopen(self): """Reopen the pooled connection.""" # If the connection is already back in the pool, # get another connection from the pool, # otherwise reopen the underlying connection. 
if self._con: self._con.reopen() else: self._con = self._pool.connection() def __getattr__(self, name): """Proxy all members of the class.""" if self._con: return getattr(self._con, name) raise InvalidConnectionError def __del__(self): """Delete the pooled connection.""" # builtins (including Exceptions) might not exist anymore try: # noqa: SIM105 self.close() except: # noqa: E722, S110 pass def __enter__(self): """Enter a runtime context for the connection.""" return self def __exit__(self, *exc): """Exit a runtime context for the connection.""" self.close()

WebwareForPython-DBUtils-ed2a1f2/dbutils/simple_pooled_db.py

"""SimplePooledDB - a very simple DB-API 2 database connection pool. Implements a pool of threadsafe cached DB-API 2 connections to a database which are transparently reused. This should result in a speedup for persistent applications such as the "Webware for Python" AppServer. For more information on the DB-API 2, see: https://www.python.org/dev/peps/pep-0249/ For more information on Webware for Python, see: https://webwareforpython.github.io/w4py/ Measures are taken to make the pool of connections threadsafe regardless of whether the DB-API 2 module used is threadsafe on the connection level (threadsafety > 1) or not. It must only be threadsafe on the module level (threadsafety = 1). If the DB-API 2 module is threadsafe, the connections will be shared between threads (keep this in mind if you use transactions). Usage: The idea behind SimplePooledDB is that it's completely transparent. After you have established your connection pool, stating the DB-API 2 module to be used, the number of connections to be cached in the pool and the connection parameters, e.g. import pgdb # import used DB-API 2 module from dbutils.simple_pooled_db import PooledDB dbpool = PooledDB(pgdb, 5, host=..., database=..., user=..., ...) you can demand database connections from that pool, db = dbpool.connection() and use them just as if they were ordinary DB-API 2 connections. It's really just a proxy class. db.close() will return the connection to the pool, it will not actually close it. This is so your existing code works nicely. Ideas for improvement: * Do not create the maximum number of connections on startup already, but only a certain number and the rest on demand. * Detect and transparently reset "bad" connections. * Connections should have some sort of maximum usage limit after which they should be automatically closed and reopened. * Prefer or enforce thread-affinity for the connections, allowing for both shareable and non-shareable connections. Please note that these and other ideas have already been implemented in PooledDB, a more sophisticated version of SimplePooledDB. You might also consider using PersistentDB instead for thread-affine persistent database connections. SimplePooledDB may still serve as a very simple reference and example implementation for developers. Copyright, credits and license: * Contributed as MiscUtils/DBPool for Webware for Python by Dan Green, December 2000 * Thread safety bug found by Tom Schwaller * Fixes by Geoff Talvola (thread safety in _threadsafe_getConnection()) * Clean up by Chuck Esterbrook * Fix unthreadsafe functions which were leaking, Jay Love * Eli Green's webware-discuss comments were lifted for additional docs * Clean-up and detailed commenting, rename and move to DBUtils by Christoph Zwerschke in September 2005 Licensed under the MIT license. """ from .
import __version__ class PooledDBError(Exception): """General PooledDB error.""" class NotSupportedError(PooledDBError): """DB-API module not supported by PooledDB.""" class PooledDBConnection: """A proxy class for pooled database connections. You don't normally deal with this class directly, but use PooledDB to get new connections. """ def __init__(self, pool, con): """Initialize pooled connection.""" self._con = con self._pool = pool def close(self): """Close the pooled connection.""" # Instead of actually closing the connection, # return it to the pool so that it can be reused. if self._con is not None: self._pool.returnConnection(self._con) self._con = None def __getattr__(self, name): """Get the attribute with the given name.""" # All other attributes are the same. return getattr(self._con, name) def __del__(self): """Delete the pooled connection.""" self.close() class PooledDB: """A very simple database connection pool. After you have created the connection pool, you can get connections using connection(). """ version = __version__ def __init__(self, dbapi, maxconnections, *args, **kwargs): """Set up the database connection pool. dbapi: the DB-API 2 compliant module you want to use maxconnections: the number of connections cached in the pool args, kwargs: the parameters that shall be used to establish the database connections using connect() """ try: threadsafety = dbapi.threadsafety except Exception: threadsafety = None if threadsafety == 0: raise NotSupportedError( "Database module does not support any level of threading.") if threadsafety == 1: # If there is no connection level safety, build # the pool using the synchronized queue class # that implements all the required locking semantics. from queue import Queue self._queue = Queue(maxconnections) # create the queue self.connection = self._unthreadsafe_get_connection self.addConnection = self._unthreadsafe_add_connection self.returnConnection = self._unthreadsafe_return_connection elif threadsafety in (2, 3): # If there is connection level safety, implement the # pool with an ordinary list used as a circular buffer. # We only need a minimum of locking in this case. from threading import Lock self._lock = Lock() # create a lock object to be used later self._nextConnection = 0 # index of the next connection to be used self._connections = [] # the list of connections self.connection = self._threadsafe_get_connection self.addConnection = self._threadsafe_add_connection self.returnConnection = self._threadsafe_return_connection else: raise NotSupportedError( "Database module threading support cannot be determined.") # Establish all database connections (it would be better to # only establish a part of them now, and the rest on demand). for _i in range(maxconnections): self.addConnection(dbapi.connect(*args, **kwargs)) # The following functions are used with DB-API 2 modules # that do not have connection level threadsafety, like PyGreSQL. # However, the module must be threadsafe at the module level. # Note: threadsafe/unthreadsafe refers to the DB-API 2 module, # not to this class which should be threadsafe in any case. def _unthreadsafe_get_connection(self): """Get a connection from the pool.""" return PooledDBConnection(self, self._queue.get()) def _unthreadsafe_add_connection(self, con): """Add a connection to the pool.""" self._queue.put(con) def _unthreadsafe_return_connection(self, con): """Return a connection to the pool. In this case, the connections need to be put back into the queue after they have been used.
This is done automatically when the connection is closed and should never be called explicitly outside of this module. """ self._unthreadsafe_add_connection(con) # The following functions are used with DB-API 2 modules # that are threadsafe at the connection level, like psycopg. # Note: In this case, connections are shared between threads. # This may lead to problems if you use transactions. def _threadsafe_get_connection(self): """Get a connection from the pool.""" with self._lock: next_con = self._nextConnection con = PooledDBConnection(self, self._connections[next_con]) next_con += 1 if next_con >= len(self._connections): next_con = 0 self._nextConnection = next_con return con def _threadsafe_add_connection(self, con): """Add a connection to the pool.""" self._connections.append(con) def _threadsafe_return_connection(self, con): """Return a connection to the pool. In this case, the connections always stay in the pool, so there is no need to do anything here. """ # we don't need to do anything here

WebwareForPython-DBUtils-ed2a1f2/dbutils/simple_pooled_pg.py

"""SimplePooledPg - a very simple classic PyGreSQL connection pool. Implements a pool of threadsafe cached connections to a PostgreSQL database which are transparently reused, using the classic (not DB-API 2 compliant) PyGreSQL pg API. This should result in a speedup for persistent applications such as the "Webware for Python" AppServer. For more information on PostgreSQL, see: https://www.postgresql.org/ For more information on PyGreSQL, see: http://www.pygresql.org For more information on Webware for Python, see: https://webwareforpython.github.io/w4py/ Measures are taken to make the pool of connections threadsafe regardless of the fact that the PyGreSQL pg module itself is not threadsafe at the connection level. Connections will never be shared between threads, so you can safely use transactions. Usage: The idea behind SimplePooledPg is that it's completely transparent. After you have established your connection pool, stating the number of connections to be cached in the pool and the connection parameters, e.g. from dbutils.simple_pooled_pg import PooledPg dbpool = PooledPg(5, host=..., database=..., user=..., ...) you can demand database connections from that pool, db = dbpool.connection() and use them just as if they were ordinary PyGreSQL pg API connections. It's really just a proxy class. db.close() will return the connection to the pool, it will not actually close it. This is so your existing code works nicely. Ideas for improvement: * Do not create the maximum number of connections on startup already, but only a certain number and the rest on demand. * Detect and transparently reset "bad" connections. The PyGreSQL pg API provides a status attribute and a reset() method for that. * Connections should have some sort of "maximum usage limit" after which they should be automatically closed and reopened. * Prefer or enforce thread affinity for the connections. Please note that these and other ideas have already been implemented in PooledPg, a more sophisticated version of SimplePooledPg. You might also consider using PersistentPg instead for thread-affine persistent PyGreSQL connections. SimplePooledPg may still serve as a very simple reference and example implementation for developers.
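For reference, a complete round trip looks like this (a minimal sketch, assuming a local database 'mydb'):

from dbutils.simple_pooled_pg import PooledPg

dbpool = PooledPg(5, dbname='mydb')  # pool of five cached connections
db = dbpool.connection()  # borrow one connection from the pool
res = db.query('select version()').getresult()
db.close()  # puts the connection back into the pool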
Copyright, credits and license: * Contributed as supplement for Webware for Python and PyGreSQL by Christoph Zwerschke in September 2005 * Based on the code of DBPool, contributed to Webware for Python by Dan Green in December 2000 Licensed under the MIT license. """ from pg import DB as PgConnection # noqa: N811 from . import __version__ class PooledPgConnection: """A proxy class for pooled PostgreSQL connections. You don't normally deal with this class directly, but use PooledPg to get new connections. """ def __init__(self, pool, con): """Initialize pooled connection.""" self._con = con self._pool = pool def close(self): """Close the pooled connection.""" # Instead of actually closing the connection, # return it to the pool so that it can be reused. if self._con is not None: self._pool.cache(self._con) self._con = None def __getattr__(self, name): """Get the attribute with the given name.""" # All other attributes are the same. return getattr(self._con, name) def __del__(self): """Delete the pooled connection.""" self.close() class PooledPg: """A very simple PostgreSQL connection pool. After you have created the connection pool, you can get connections using getConnection(). """ version = __version__ def __init__(self, maxconnections, *args, **kwargs): """Set up the PostgreSQL connection pool. maxconnections: the number of connections cached in the pool args, kwargs: the parameters that shall be used to establish the PostgreSQL connections using pg.connect() """ # Since there is no connection level safety, we # build the pool using the synchronized queue class # that implements all the required locking semantics. from queue import Queue self._queue = Queue(maxconnections) # Establish all database connections (it would be better to # only establish a part of them now, and the rest on demand). for _i in range(maxconnections): self.cache(PgConnection(*args, **kwargs)) def cache(self, con): """Add or return a connection to the pool.""" self._queue.put(con) def connection(self): """Get a connection from the pool.""" return PooledPgConnection(self, self._queue.get()) WebwareForPython-DBUtils-ed2a1f2/dbutils/steady_db.py000066400000000000000000000645711457556542700227700ustar00rootroot00000000000000"""SteadyDB - hardened DB-API 2 connections. Implements steady connections to a database based on an arbitrary DB-API 2 compliant database interface module. The connections are transparently reopened when they are closed or the database connection has been lost or when they are used more often than an optional usage limit. Database cursors are transparently reopened as well when the execution of a database operation cannot be performed due to a lost connection. Only if the connection is lost after the execution, when rows are already fetched from the database, this will give an error and the cursor will not be reopened automatically, because there is no reliable way to recover the state of the cursor in such a situation. Connections which have been marked as being in a transaction with a begin() call will not be silently replaced either. A typical situation where database connections are lost is when the database server or an intervening firewall is shutdown and restarted for maintenance reasons. In such a case, all database connections would become unusable, even though the database service may be already available again. The "hardened" connections provided by this module will make the database connections immediately available again. 
This approach results in a steady database connection that can be used by PooledDB or PersistentDB to create pooled or persistent connections to a database in a threaded environment such as the application server of "Webware for Python." Note, however, that the connections themselves may not be thread-safe (depending on the used DB-API module). For the Python DB-API 2 specification, see: https://www.python.org/dev/peps/pep-0249/ For information on Webware for Python, see: https://webwareforpython.github.io/w4py/ Usage: You can use the connection constructor connect() in the same way as you would use the connection constructor of a DB-API 2 module if you specify the DB-API 2 module to be used as the first parameter, or alternatively you can specify an arbitrary constructor function returning new DB-API 2 compliant connection objects as the first parameter. Passing just a function allows implementing failover mechanisms and load balancing strategies. You may also specify a usage limit as the second parameter (set it to None if you prefer unlimited usage), an optional list of commands that may serve to prepare the session as a third parameter, the exception classes for which the failover mechanism shall be applied, and you can specify whether is is allowed to close the connection (by default this is true). When the connection to the database is lost or has been used too often, it will be transparently reset in most situations, without further notice. import pgdb # import used DB-API 2 module from dbutils.steady_db import connect db = connect(pgdb, 10000, ["set datestyle to german"], host=..., database=..., user=..., ...) ... cursor = db.cursor() ... cursor.execute('select ...') result = cursor.fetchall() ... cursor.close() ... db.close() Ideas for improvement: * Alternatively to the maximum number of uses, implement a maximum time to live for connections. * Optionally log usage and loss of connection. Copyright, credits and license: * Contributed as supplement for Webware for Python and PyGreSQL by Christoph Zwerschke in September 2005 * Allowing creator functions as first parameter as in SQLAlchemy suggested by Ezio Vernacotola in December 2006 Licensed under the MIT license. """ import sys from contextlib import suppress from . import __version__ class SteadyDBError(Exception): """General SteadyDB error.""" class InvalidCursorError(SteadyDBError): """Database cursor is invalid.""" # deprecated alias names for error classes InvalidCursor = InvalidCursorError def connect( creator, maxusage=None, setsession=None, failures=None, ping=1, closeable=True, *args, **kwargs): """Create a "tough" connection. A hardened version of the connection function of a DB-API 2 module. creator: either an arbitrary function returning new DB-API 2 compliant connection objects or a DB-API 2 compliant database module maxusage: maximum usage limit for the underlying DB-API 2 connection (number of database operations, 0 or None means unlimited usage) callproc(), execute() and executemany() count as one operation. When the limit is reached, the connection is automatically reset. setsession: an optional list of SQL commands that may serve to prepare the session, e.g. 
["set datestyle to german", "set time zone mez"] failures: an optional exception class or a tuple of exception classes for which the failover mechanism shall be applied, if the default (OperationalError, InternalError, Interface) is not adequate for the used database module ping: determines when the connection should be checked with ping() (0 = None = never, 1 = default = when _ping_check() is called, 2 = whenever a cursor is created, 4 = when a query is executed, 7 = always, and all other bit combinations of these values) closeable: if this is set to false, then closing the connection will be silently ignored, but by default the connection can be closed args, kwargs: the parameters that shall be passed to the creator function or the connection constructor of the DB-API 2 module """ return SteadyDBConnection( creator, maxusage, setsession, failures, ping, closeable, *args, **kwargs) class SteadyDBConnection: """A hardened version of DB-API 2 connections.""" version = __version__ def __init__( self, creator, maxusage=None, setsession=None, failures=None, ping=1, closeable=True, *args, **kwargs): """Create a "tough" DB-API 2 connection.""" # basic initialization to make finalizer work self._con = None self._closed = True # proper initialization of the connection try: self._creator = creator.connect try: if creator.dbapi.connect: self._dbapi = creator.dbapi except AttributeError: self._dbapi = creator except AttributeError: # try finding the DB-API 2 module via the connection creator self._creator = creator try: self._dbapi = creator.dbapi except AttributeError: try: self._dbapi = sys.modules[creator.__module__] if self._dbapi.connect != creator: raise AttributeError except (AttributeError, KeyError): self._dbapi = None try: self._threadsafety = creator.threadsafety except AttributeError: try: self._threadsafety = self._dbapi.threadsafety except AttributeError: self._threadsafety = None if not callable(self._creator): raise TypeError(f"{creator!r} is not a connection provider.") if maxusage is None: maxusage = 0 if not isinstance(maxusage, int): raise TypeError("'maxusage' must be an integer value.") self._maxusage = maxusage self._setsession_sql = setsession if failures is not None and not isinstance( failures, tuple) and not issubclass(failures, Exception): raise TypeError("'failures' must be a tuple of exceptions.") self._failures = failures self._ping = ping if isinstance(ping, int) else 0 self._closeable = closeable self._args, self._kwargs = args, kwargs self._store(self._create()) def __enter__(self): """Enter the runtime context for the connection object.""" return self def __exit__(self, *exc): """Exit the runtime context for the connection object. This does not close the connection, but it ends a transaction. 
""" if exc[0] is None and exc[1] is None and exc[2] is None: self.commit() else: self.rollback() def _create(self): """Create a new connection using the creator function.""" con = self._creator(*self._args, **self._kwargs) try: try: if self._dbapi.connect != self._creator: raise AttributeError except AttributeError: # try finding the DB-API 2 module via the connection itself try: mod = con.__module__ except AttributeError: mod = None while mod: try: self._dbapi = sys.modules[mod] if not callable(self._dbapi.connect): raise AttributeError except (AttributeError, KeyError): pass else: break i = mod.rfind('.') mod = None if i < 0 else mod[:i] else: try: mod = con.OperationalError.__module__ except AttributeError: mod = None while mod: try: self._dbapi = sys.modules[mod] if not callable(self._dbapi.connect): raise AttributeError except (AttributeError, KeyError): pass else: break i = mod.rfind('.') mod = None if i < 0 else mod[:i] else: self._dbapi = None if self._threadsafety is None: try: self._threadsafety = self._dbapi.threadsafety except AttributeError: with suppress(AttributeError): self._threadsafety = con.threadsafety if self._failures is None: try: self._failures = ( self._dbapi.OperationalError, self._dbapi.InterfaceError, self._dbapi.InternalError) except AttributeError: try: self._failures = ( self._creator.OperationalError, self._creator.InterfaceError, self._creator.InternalError) except AttributeError: try: self._failures = ( con.OperationalError, con.InterfaceError, con.InternalError) except AttributeError as error: raise AttributeError( "Could not determine failure exceptions" " (please set failures or creator.dbapi).", ) from error if isinstance(self._failures, tuple): self._failure = self._failures[0] else: self._failure = self._failures self._setsession(con) except Exception as error: # the database module could not be determined # or the session could not be prepared with suppress(Exception): con.close() # close the connection first raise error # re-raise the original error again return con def _setsession(self, con=None): """Execute the SQL commands for session preparation.""" if con is None: con = self._con if self._setsession_sql: cursor = con.cursor() for sql in self._setsession_sql: cursor.execute(sql) cursor.close() def _store(self, con): """Store a database connection for subsequent use.""" self._con = con self._transaction = False self._closed = False self._usage = 0 def _close(self): """Close the tough connection. You can always close a tough connection with this method, and it will not complain if you close it more than once. """ if not self._closed: with suppress(Exception): self._con.close() self._transaction = False self._closed = True def _reset(self, force=False): """Reset a tough connection. Rollback if forced or the connection was in a transaction. """ if not self._closed and (force or self._transaction): with suppress(Exception): self.rollback() def _ping_check(self, ping=1, reconnect=True): """Check whether the connection is still alive using ping(). If the underlying connection is not active and the ping parameter is set accordingly, the connection will be recreated unless the connection is currently inside a transaction. 
""" if ping & self._ping: try: # if possible, ping the connection try: # pass a reconnect=False flag if this is supported alive = self._con.ping(False) except TypeError: # the reconnect flag is not supported alive = self._con.ping() except (AttributeError, IndexError, TypeError, ValueError): self._ping = 0 # ping() is not available alive = None reconnect = False except Exception: alive = False else: if alive is None: alive = True if alive: reconnect = False if reconnect and not self._transaction: try: # try to reopen the connection con = self._create() except Exception: # noqa: S110 pass else: self._close() self._store(con) alive = True return alive return None def dbapi(self): """Return the underlying DB-API 2 module of the connection.""" if self._dbapi is None: raise AttributeError( "Could not determine DB-API 2 module" " (please set creator.dbapi).") return self._dbapi def threadsafety(self): """Return the thread safety level of the connection.""" if self._threadsafety is None: if self._dbapi is None: raise AttributeError( "Could not determine threadsafety" " (please set creator.dbapi or creator.threadsafety).") return 0 return self._threadsafety def close(self): """Close the tough connection. You are allowed to close a tough connection by default, and it will not complain if you close it more than once. You can disallow closing connections by setting the closeable parameter to something false. In this case, closing tough connections will be silently ignored. """ if self._closeable: self._close() elif self._transaction: self._reset() def begin(self, *args, **kwargs): """Indicate the beginning of a transaction. During a transaction, connections won't be transparently replaced, and all errors will be raised to the application. If the underlying driver supports this method, it will be called with the given parameters (e.g. for distributed transactions). """ self._transaction = True try: begin = self._con.begin except AttributeError: pass else: begin(*args, **kwargs) def commit(self): """Commit any pending transaction.""" self._transaction = False try: self._con.commit() except self._failures as error: # cannot commit try: # try to reopen the connection con = self._create() except Exception: # noqa: S110 pass else: self._close() self._store(con) raise error # re-raise the original error def rollback(self): """Rollback pending transaction.""" self._transaction = False try: self._con.rollback() except self._failures as error: # cannot rollback try: # try to reopen the connection con = self._create() except Exception: # noqa: S110 pass else: self._close() self._store(con) raise error # re-raise the original error def cancel(self): """Cancel a long-running transaction. If the underlying driver supports this method, it will be called. """ self._transaction = False try: cancel = self._con.cancel except AttributeError: pass else: cancel() def ping(self, *args, **kwargs): """Ping connection.""" return self._con.ping(*args, **kwargs) def _cursor(self, *args, **kwargs): """Create a "tough" cursor. This is a hardened version of the method cursor(). """ # The args and kwargs are not part of the standard, # but some database modules seem to use these. 
transaction = self._transaction if not transaction: self._ping_check(2) try: # check whether the connection has been used too often if (self._maxusage and self._usage >= self._maxusage and not transaction): raise self._failure cursor = self._con.cursor(*args, **kwargs) # try to get a cursor except self._failures as error: # error in getting cursor try: # try to reopen the connection con = self._create() except Exception: # noqa: S110 pass else: try: # and try one more time to get a cursor cursor = con.cursor(*args, **kwargs) except Exception: # noqa: S110 pass else: self._close() self._store(con) if transaction: raise error # re-raise the original error again return cursor with suppress(Exception): con.close() if transaction: self._transaction = False raise error # re-raise the original error again return cursor def cursor(self, *args, **kwargs): """Return a new Cursor Object using the connection.""" return SteadyDBCursor(self, *args, **kwargs) def __del__(self): """Delete the steady connection.""" # builtins (including Exceptions) might not exist anymore try: # noqa: SIM105 self._close() # make sure the connection is closed except: # noqa: E722, S110 pass class SteadyDBCursor: """A hardened version of DB-API 2 cursors.""" def __init__(self, con, *args, **kwargs): """Create a "tough" DB-API 2 cursor.""" # basic initialization to make finalizer work self._cursor = None self._closed = True # proper initialization of the cursor self._con = con self._args, self._kwargs = args, kwargs self._clearsizes() try: self._cursor = con._cursor(*args, **kwargs) except AttributeError as error: raise TypeError(f"{con!r} is not a SteadyDBConnection.") from error self._closed = False def __enter__(self): """Enter the runtime context for the cursor object.""" return self def __exit__(self, *exc): """Exit the runtime context for the cursor object.""" self.close() def __iter__(self): """Make cursor compatible to the iteration protocol.""" cursor = self._cursor try: # use iterator provided by original cursor return iter(cursor) except TypeError: # create iterator if not provided return iter(cursor.fetchone, None) def setinputsizes(self, sizes): """Store input sizes in case cursor needs to be reopened.""" self._inputsizes = sizes def setoutputsize(self, size, column=None): """Store output sizes in case cursor needs to be reopened.""" self._outputsizes[column] = size def _clearsizes(self): """Clear stored input and output sizes.""" self._inputsizes = [] self._outputsizes = {} def _setsizes(self, cursor=None): """Set stored input and output sizes for cursor execution.""" if cursor is None: cursor = self._cursor if self._inputsizes: cursor.setinputsizes(self._inputsizes) for column, size in self._outputsizes.items(): if column is None: cursor.setoutputsize(size) else: cursor.setoutputsize(size, column) def close(self): """Close the tough cursor. It will not complain if you close it more than once. 
""" if not self._closed: with suppress(Exception): self._cursor.close() self._closed = True def _get_tough_method(self, name): """Return a "tough" version of the given cursor method.""" def tough_method(*args, **kwargs): execute = name.startswith('execute') con = self._con transaction = con._transaction if not transaction: con._ping_check(4) try: # check whether the connection has been used too often if (con._maxusage and con._usage >= con._maxusage and not transaction): raise con._failure if execute: self._setsizes() method = getattr(self._cursor, name) result = method(*args, **kwargs) # try to execute if execute: self._clearsizes() except con._failures as error: # execution error if not transaction: try: cursor2 = con._cursor( *self._args, **self._kwargs) # open new cursor except Exception: # noqa: S110 pass else: try: # and try one more time to execute if execute: self._setsizes(cursor2) method = getattr(cursor2, name) result = method(*args, **kwargs) if execute: self._clearsizes() except Exception: # noqa: S110 pass else: self.close() self._cursor = cursor2 con._usage += 1 return result with suppress(Exception): cursor2.close() try: # try to reopen the connection con2 = con._create() except Exception: # noqa: S110 pass else: try: cursor2 = con2.cursor( *self._args, **self._kwargs) # open new cursor except Exception: # noqa: S110 pass else: if transaction: self.close() con._close() con._store(con2) self._cursor = cursor2 raise error # raise the original error again error2 = None try: # try one more time to execute if execute: self._setsizes(cursor2) method2 = getattr(cursor2, name) result = method2(*args, **kwargs) if execute: self._clearsizes() except error.__class__: # same execution error use2 = False error2 = error except Exception as error: # other execution errors use2 = True error2 = error else: use2 = True if use2: self.close() con._close() con._store(con2) self._cursor = cursor2 con._usage += 1 if error2: raise error2 # raise the other error return result with suppress(Exception): cursor2.close() with suppress(Exception): con2.close() if transaction: self._transaction = False raise error # re-raise the original error again else: con._usage += 1 return result return tough_method def __getattr__(self, name): """Inherit methods and attributes of underlying cursor.""" if self._cursor: if name.startswith(('execute', 'call')): # make execution methods "tough" return self._get_tough_method(name) return getattr(self._cursor, name) raise InvalidCursorError def __del__(self): """Delete the steady cursor.""" # builtins (including Exceptions) might not exist anymore try: # noqa: SIM105 self.close() # make sure the cursor is closed except: # noqa: E722, S110 pass WebwareForPython-DBUtils-ed2a1f2/dbutils/steady_pg.py000066400000000000000000000255521457556542700230050ustar00rootroot00000000000000"""SteadyPg - hardened classic PyGreSQL connections. Implements steady connections to a PostgreSQL database using the classic (not DB-API 2 compliant) PyGreSQL API. The connections are transparently reopened when they are closed or the database connection has been lost or when they are used more often than an optional usage limit. Only connections which have been marked as being in a database transaction with a begin() call will not be silently replaced. A typical situation where database connections are lost is when the database server or an intervening firewall is shutdown and restarted for maintenance reasons. 
In such a case, all database connections would become unusable, even though the database service may be already available again. The "hardened" connections provided by this module will make the database connections immediately available again. This results in a steady PostgreSQL connection that can be used by PooledPg or PersistentPg to create pooled or persistent connections to a PostgreSQL database in a threaded environment such as the application server of "Webware for Python." Note, however, that the connections themselves are not thread-safe. For more information on PostgreSQL, see: https://www.postgresql.org/ For more information on PyGreSQL, see: http://www.pygresql.org For more information on Webware for Python, see: https://webwareforpython.github.io/w4py/ Usage: You can use the class SteadyPgConnection in the same way as you would use the class DB from the classic PyGreSQL API module db. The only difference is that you may specify a usage limit as the first parameter when you open a connection (set it to None if you prefer unlimited usage), and an optional list of commands that may serve to prepare the session as the second parameter, and you can specify whether is is allowed to close the connection (by default this is true). When the connection to the PostgreSQL database is lost or has been used too often, it will be automatically reset, without further notice. from dbutils.steady_pg import SteadyPgConnection db = SteadyPgConnection(10000, ["set datestyle to german"], host=..., dbname=..., user=..., ...) ... result = db.query('...') ... db.close() Ideas for improvement: * Alternatively to the maximum number of uses, implement a maximum time to live for connections. * Optionally log usage and loss of connection. Copyright, credits and license: * Contributed as supplement for Webware for Python and PyGreSQL by Christoph Zwerschke in September 2005 Licensed under the MIT license. """ from contextlib import suppress from pg import DB as PgConnection # noqa: N811 from . import __version__ class SteadyPgError(Exception): """General SteadyPg error.""" class InvalidConnectionError(SteadyPgError): """Database connection is invalid.""" # deprecated alias names for error classes InvalidConnection = InvalidConnectionError class SteadyPgConnection: """Class representing steady connections to a PostgreSQL database. Underlying the connection is a classic PyGreSQL pg API database connection which is reset if the connection is lost or used too often. Thus the resulting connection is steadier ("tough and self-healing"). If you want the connection to be persistent in a threaded environment, then you should not deal with this class directly, but use either the PooledPg module or the PersistentPg module to get the connections. """ version = __version__ def __init__( self, maxusage=None, setsession=None, closeable=True, *args, **kwargs): """Create a "tough" PostgreSQL connection. A hardened version of the DB wrapper class of PyGreSQL. maxusage: maximum usage limit for the underlying PyGreSQL connection (number of uses, 0 or None means unlimited usage) When this limit is reached, the connection is automatically reset. setsession: optional list of SQL commands that may serve to prepare the session, e.g. 
["set datestyle to ...", "set time zone ..."] closeable: if this is set to false, then closing the connection will be silently ignored, but by default the connection can be closed args, kwargs: the parameters that shall be used to establish the PostgreSQL connections with PyGreSQL using pg.DB() """ # basic initialization to make finalizer work self._con = None self._closed = True # proper initialization of the connection if maxusage is None: maxusage = 0 if not isinstance(maxusage, int): raise TypeError("'maxusage' must be an integer value.") self._maxusage = maxusage self._setsession_sql = setsession self._closeable = closeable self._con = PgConnection(*args, **kwargs) self._transaction = False self._closed = False self._setsession() self._usage = 0 def __enter__(self): """Enter the runtime context. This will start a transaction.""" self.begin() return self def __exit__(self, *exc): """Exit the runtime context. This will end the transaction.""" if exc[0] is None and exc[1] is None and exc[2] is None: self.commit() else: self.rollback() def _setsession(self): """Execute the SQL commands for session preparation.""" if self._setsession_sql: for sql in self._setsession_sql: self._con.query(sql) def _close(self): """Close the tough connection. You can always close a tough connection with this method, and it will not complain if you close it more than once. """ if not self._closed: with suppress(Exception): self._con.close() self._transaction = False self._closed = True def close(self): """Close the tough connection. You are allowed to close a tough connection by default, and it will not complain if you close it more than once. You can disallow closing connections by setting the closeable parameter to something false. In this case, closing tough connections will be silently ignored. """ if self._closeable: self._close() elif self._transaction: self.reset() def reopen(self): """Reopen the tough connection. It will not complain if the connection cannot be reopened. """ try: self._con.reopen() except Exception: if self._transaction: self._transaction = False with suppress(Exception): self._con.query('rollback') else: self._transaction = False self._closed = False self._setsession() self._usage = 0 def reset(self): """Reset the tough connection. If a reset is not possible, tries to reopen the connection. It will not complain if the connection is already closed. 
""" try: self._con.reset() self._transaction = False self._setsession() self._usage = 0 except Exception: try: self.reopen() except Exception: with suppress(Exception): self.rollback() def begin(self, sql=None): """Begin a transaction.""" self._transaction = True try: begin = self._con.begin except AttributeError: return self._con.query(sql or 'begin') else: # use existing method if available return begin(sql=sql) if sql else begin() def end(self, sql=None): """Commit the current transaction.""" self._transaction = False try: end = self._con.end except AttributeError: return self._con.query(sql or 'end') else: return end(sql=sql) if sql else end() def commit(self, sql=None): """Commit the current transaction.""" self._transaction = False try: commit = self._con.commit except AttributeError: return self._con.query(sql or 'commit') else: return commit(sql=sql) if sql else commit() def rollback(self, sql=None): """Rollback the current transaction.""" self._transaction = False try: rollback = self._con.rollback except AttributeError: return self._con.query(sql or 'rollback') else: return rollback(sql=sql) if sql else rollback() def _get_tough_method(self, method): """Return a "tough" version of a connection class method. The tough version checks whether the connection is bad (lost) and automatically and transparently tries to reset the connection if this is the case (for instance, the database has been restarted). """ def tough_method(*args, **kwargs): transaction = self._transaction if not transaction: try: # check whether connection status is bad # or the connection has been used too often if not self._con.db.status or ( self._maxusage and self._usage >= self._maxusage): raise AttributeError except Exception: self.reset() # then reset the connection try: result = method(*args, **kwargs) # try connection method except Exception: # error in query if transaction: # inside a transaction self._transaction = False raise # propagate the error if self._con.db.status: # if it was not a connection problem raise # then propagate the error self.reset() # reset the connection result = method(*args, **kwargs) # and try one more time self._usage += 1 return result return tough_method def __getattr__(self, name): """Inherit the members of the standard connection class. Some methods are made "tougher" than in the standard version. """ if self._con: attr = getattr(self._con, name) if (name in ('query', 'get', 'insert', 'update', 'delete') or name.startswith('get_')): attr = self._get_tough_method(attr) return attr raise InvalidConnectionError def __del__(self): """Delete the steady connection.""" # builtins (including Exceptions) might not exist anymore try: # noqa: SIM105 self._close() # make sure the connection is closed except: # noqa: E722, S110 pass WebwareForPython-DBUtils-ed2a1f2/docs/000077500000000000000000000000001457556542700177255ustar00rootroot00000000000000WebwareForPython-DBUtils-ed2a1f2/docs/changelog.html000066400000000000000000000270511457556542700225470ustar00rootroot00000000000000 Changelog for DBUtils

WebwareForPython-DBUtils-ed2a1f2/docs/changelog.rst000066400000000000000000000174011457556542700224110ustar00rootroot00000000000000Changelog for DBUtils +++++++++++++++++++++ 3.1.0 ===== DBUtils 3.1.0 was released on March 17, 2024. Changes: * Support Python version 3.12, cease support for Python 3.6. * Various small internal improvements and modernizations. 3.0.3 ===== DBUtils 3.0.3 was released on April 27, 2023. Changes: * Support Python version 3.11. * Improve determination of DB API module if creator is specified. * Minor fixes and section an advanced usage in docs. 3.0.2 ===== DBUtils 3.0.2 was released on January 14, 2022. The optional iterator protocol on cursors is now supported. 3.0.1 ===== DBUtils 3.0.1 was released on December 22, 2021. It includes ``InterfaceError`` to the default list of exceptions for which the connection failover mechanism is applied. You can override this with the ``failures`` parameter. 3.0.0 ===== DBUtils 3.0.0 was released on November 26, 2021. It is intended to be used with Python versions 3.6 to 3.10. Changes: * Cease support for Python 2 and 3.5, minor optimizations. 2.0.3 ===== DBUtils 2.0.3 was released on November 26, 2021. Changes: * Support Python version 3.10. 2.0.2 ===== DBUtils 2.0.2 was released on June 8, 2021. Changes: * Allow using context managers for pooled connections. 2.0.1 ===== DBUtils 2.0.1 was released on April 8, 2021. Changes: * Avoid "name Exception is not defined" when exiting. 2.0 === DBUtils 2.0 was released on September 26, 2020. It is intended to be used with Python versions 2.7 and 3.5 to 3.9. Changes: * DBUtils does not act as a Webware plugin anymore, it is now just an ordinary Python package (of course it could be used as such also before). * The Webware ``Examples`` folder has been removed. * Folders, packages and modules have been renamed to lower-case. Particularly, you need to import ``dbutils`` instead of ``DBUtils`` now. * The internal naming conventions have also been changed to comply with PEP8. * The documentation has been adapted to reflect the changes in this version. * This changelog has been compiled from the former release notes. 1.4 === DBUtils 1.4 was released on September 26, 2020. It is intended to be used with Python versions 2.7 and 3.5 to 3.9. Improvements: * The ``SteadyDB`` and ``SteadyPg`` classes only reconnect after the ``maxusage`` limit has been reached when the connection is not currently inside a transaction. 1.3 === DBUtils 1.3 was released on March 3, 2018. It is intended to be used with Python versions 2.6, 2.7 and 3.4 to 3.7. Improvements: * This version now supports context handlers for connections and cursors. 1.2 === DBUtils 1.2 was released on February 5, 2017. It is intended to be used with Python versions 2.6, 2.7 and 3.0 to 3.6. 1.1.1 ===== DBUtils 1.1.1 was released on February 4, 2017. It is intended to be used with Python versions 2.3 to 2.7. Improvements: * Reopen ``SteadyDB`` connections when commit or rollback fails (suggested by Ben Hoyt). Bugfixes: * Fixed a problem when running under Jython (reported by Vitaly Kruglikov). 1.1 === DBUtils 1.1 was released on August 14, 2011. Improvements: * The transparent reopening of connections is actually an undesired behavior if it happens during database transactions. In these cases, the transaction should fail and the error be reported back to the application instead of the rest of the transaction being executed in a new connection and therefore in a new transaction. 
Therefore DBUtils now allows suspending the transparent reopening during transactions. All you need to do is indicate the beginning of a transaction by calling the ``begin()`` method of the connection. DBUtils makes sure that this method always exists, even if the database driver does not support it. * If the database driver supports a ``ping()`` method, then DBUtils can use it to check whether connections are alive instead of just trying to use the connection and reestablishing it in case it was dead. Since these checks are done at the expense of some performance, you have exact control when these are executed via the new ``ping`` parameter. * ``PooledDB`` has got another new parameter ``reset`` for controlling how connections are reset before being put back into the pool. Bugfixes: * Fixed propagation of error messages when the connection was lost. * Fixed an issue with the ``setoutputsize()`` cursor method. * Fixed some minor issues with the ``DBUtilsExample`` for Webware. 1.0 === DBUtils 1.0 was released on November 29, 2008. It is intended to be used with Python versions 2.2 to 2.6. Changes: * Added a ``failures`` parameter for configuring the exception classes for which the failover mechanisms is applied (as suggested by Matthew Harriger). * Added a ``closeable`` parameter for configuring whether connections can be closed (otherwise closing connections will be silently ignored). * It is now possible to override defaults via the ``creator.dbapi`` and ``creator.threadsafety`` attributes. * Added an alias method ``dedicated_connection`` as a shorthand for ``connection(shareable=False)``. * Added a version attribute to all exported classes. * Where the value ``0`` has the meaning "unlimited", parameters can now be also set to the value ``None`` instead. * It turned out that ``threading.local`` does not work properly with ``mod_wsgi``, so we use the Python implementation for thread-local data even when a faster ``threading.local`` implementation is available. A new parameter ``threadlocal`` allows you to pass an arbitrary class such as ``threading.local`` if you know it works in your environment. Bugfixes and improvements: * In some cases, when instance initialization failed or referenced objects were already destroyed, finalizers could throw exceptions or create infinite recursion (problem reported by Gregory Pinero and Jehiah Czebotar). * DBUtils now tries harder to find the underlying DB-API 2 module if only a connection creator function is specified. This had not worked before with the MySQLdb module (problem reported by Gregory Pinero). 0.9.4 ===== DBUtils 0.9.4 was released on July 7, 2007. This release fixes a problem in the destructor code and has been supplemented with a German User's Guide. Again, please note that the ``dbapi`` parameter has been renamed to ``creator`` in the last release, since you can now pass custom creator functions for database connections instead of DB-API 2 modules. 0.9.3 ===== DBUtils 0.9.3 was released on May 21, 2007. Changes: * Support custom creator functions for database connections. These can now be used as the first parameter instead of an DB-API module (suggested by Ezio Vernacotola). * Added destructor for steady connections. * Use setuptools_ if available. * Some code cleanup. * Some fixes in the documentation. Added Chinese translation of the User's Guide, kindly contributed by gashero. .. _setuptools: https://github.com/pypa/setuptools 0.9.2 ===== DBUtils 0.9.2 was released on September 22, 2006. 
It is intended to be used with Python versions 2.2 to 2.5. Changes: * Renamed ``SolidDB`` to ``SteadyDB`` to avoid confusion with the "solidDB" storage engine. Accordingly, renamed ``SolidPg`` to ``SteadyPg``. 0.9.1 ===== DBUtils 0.9.1 was released on May 8, 2006. It is intended to be used with Python versions 2.2 to 2.4. Changes: * Added ``_closeable`` attribute and made persistent connections not closeable by default. This allows ``PersistentDB`` to be used in the same way as you would use ``PooledDB``. * Allowed arguments in the DB-API 2 ``cursor()`` method. MySQLdb is using this to specify cursor classes. (Suggested by Michael Palmer.) * Improved the documentation and added a User's Guide. 0.8.1 - 2005-09-13 ================== DBUtils 0.8.1 was released on September 13, 2005. It is intended to be used with Python versions 2.0 to 2.4. This is the first public release of DBUtils.
WebwareForPython-DBUtils-ed2a1f2/docs/dependencies_db.png (binary PNG image: module dependency diagram for the DB-API 2 variant)
WebwareForPython-DBUtils-ed2a1f2/docs/dependencies_pg.png (binary PNG image: module dependency diagram for the PyGreSQL variant)
WebwareForPython-DBUtils-ed2a1f2/docs/doc.css000066400000000000000000000054301457556542700212060ustar00rootroot00000000000000/* Webware for Python (https://webwareforpython.github.io/w4py/) Common style sheet for Webware's documentation pages */ /* First import default style for pages created with Docutils: */ @import url(docutils.css); /* Customization for Webware goes here: */ body { background-color: #FFFFFF; font-family: Verdana, Arial, Helvetica, sans-serif; font-size: 10pt; padding: 12pt; } table { empty-cells: show; border-collapse: collapse; margin: 0 auto; } table.doc { border-spacing: 2px; border-collapse: separate; border-style: none; } td, th { font-family: Verdana, Arial, Helvetica, sans-serif; font-size: 10pt; } table.doc td, table.doc th { padding: 4px; border-style: none; } p { margin-top: 6pt; margin-bottom: 6pt; text-align: justify; } li { margin-bottom: 6pt; } h1, h2 { font-family: Verdana, Arial, Helvetica, sans-serif; color: #002352; } h3, h4 { font-family: Verdana, Arial, Helvetica, sans-serif; color: #002352; } h1 { font-size: 18pt; } h2 { font-size: 16pt; } h3 { font-size: 14pt; } h4 { font-size: 12pt; } h5 { font-size: 11pt; } h6 { font-size: 10pt; } h1.titlebar { padding: 4pt; margin-bottom: 12pt; text-align: center; color: white; background-color: #025; } h1.title, h1.header { padding: 4pt; margin-bottom: 12pt; text-align: center; border-bottom: 1pt solid #025; padding-bottom: 8pt; } div.footer { font-family: Tahoma, Arial, Helvetica, sans-serif; font-size: 9pt; text-align: center; padding: 4pt; margin-top: 16pt; border-top: 1pt solid #025; } .left { text-align: left; } .right { text-align: right; } .center { text-align: center; } .top { vertical-align: top; } .nowrap { white-space: nowrap; } .contents { font-family: Tahoma, Arial, Helvetica, sans-serif; } .contents ul { list-style: none; margin-bottom: 24pt; padding-left: 0em; margin-left: 2em; } .contents ul li { font-size: 11pt; margin-bottom: 3pt; } .contents ul ul { list-style-type: none; margin-top: 2pt; margin-bottom: 2pt; padding-left: 0em; margin-left: 1.5em; } .contents ul ul li { font-size: 10pt; margin-bottom: 1pt; } .contents .topic-title { font-size: 16pt; } span.name { font-weight: bold; } span.filename { font-family:
Tahoma, Arial, Helvetica, sans-serif; font-size: 9pt; } code, .literal, .literal-block, .pre, .py { font-family: "Andale Mono", "Lucida Console", Monaco, "Courier New", Courier, monospace; font-size: 10pt; color: #052; } tt.literal, span.pre { background-color: #FFFFFF; } pre.py, pre.literal-block { margin: 0; padding: 2pt 1pt 1pt 2pt; background-color: #F0F0F8; } .typed { font-weight: bold; } .error { color: red; } .warning { color: brown; } /* Configuration documentation: */ dl.config { } dt.config { } dd.config { } span.setting { font-family: Tahoma, Arial, Helvetica, sans-serif; font-size: 9pt; font-weight: bold; } WebwareForPython-DBUtils-ed2a1f2/docs/docutils.css000066400000000000000000000050221457556542700222640ustar00rootroot00000000000000/* CSS 3.1 style sheet for the output of Docutils 0.17 HTML writer. */ body{margin:0;background-color:#dbdbdb}main,footer,header{line-height:1.3;max-width:50rem;padding:1px 2%;margin:auto}main{counter-reset:table figure;background-color:white}footer,header{font-size:smaller;padding:.5em 2%;border:0}hr.docutils{width:80%;margin-top:1em;margin-bottom:1em;clear:both}p,ol,ul,dl,li,dd,div.line-block,div.topic,table{margin-top:.5em;margin-bottom:.5em}p:first-child{margin-top:0}p:last-child{margin-bottom:0}h1,h2,h3,h4,h5,h6,dl>dd{margin-bottom:.5em}dl>dd,ol>li,dd>ul:only-child,dd>ol:only-child{padding-left:1em}dl.description>dt{font-weight:bold;clear:left;float:left;margin:0;padding:0;padding-right:.5em}dl.field-list.narrow>dd{margin-left:5em}dl.field-list.run-in>dd p{display:block}div.abstract p.topic-title{text-align:center}div.dedication{margin:2em 5em;text-align:center;font-style:italic}div.dedication p.topic-title{font-style:normal}pre.literal-block,pre.doctest-block,pre.math,pre.code{font-family:monospace}blockquote>table,div.topic>table{margin-top:0;margin-bottom:0}blockquote p.attribution,div.topic p.attribution{text-align:right;margin-left:20%}table tr{text-align:left}table.booktabs{border:0;border-top:2px solid;border-bottom:2px solid;border-collapse:collapse}table.booktabs *{border:0}table.booktabs th{border-bottom:thin solid}table.numbered>caption:before{counter-increment:table;content:"Table " counter(table) ": ";font-weight:bold}dl.footnote{padding-left:1ex;border-left:solid;border-left-width:thin}figure.align-left,img.align-left,video.align-left,object.align-left{clear:left;float:left;margin-right:1em}figure.align-right,img.align-right,video.align-right,object.align-right{clear:right;float:right;margin-left:1em}h1,h2,h3,h4,footer,header{clear:both}figure.numbered>figcaption>p:before{counter-increment:figure;content:"Figure " counter(figure) ": ";font-weight:bold}.caution p.admonition-title,.attention p.admonition-title,.danger p.admonition-title,.error p.admonition-title,.warning p.admonition-title,div.error{color:red}aside.sidebar{width:30%;max-width:26em;margin-left:1em;margin-right:-2%;background-color:#ffe}pre.code{padding:.7ex}pre.code,code{background-color:#eee}pre.code .comment,code .comment{color:#5c6576}pre.code .keyword,code .keyword{color:#3b0d06;font-weight:bold}pre.code .literal.string,code .literal.string{color:#0c5404}pre.code .name.builtin,code .name.builtin{color:#352b84}pre.code .deleted,code .deleted{background-color:#deb0a1}pre.code .inserted,code .inserted{background-color:#a3d289}a{text-decoration:none}WebwareForPython-DBUtils-ed2a1f2/docs/main.de.html000066400000000000000000001113601457556542700221300ustar00rootroot00000000000000 Benutzeranleitung für DBUtils

DBUtils User's Guide

Version:
3.1.0
Translations:

English | German

Synopsis

DBUtils is a suite of Python modules that allows multithreaded applications written in Python to connect to databases in a safe and efficient way.

DBUtils was originally developed specifically for Webware for Python as the application and PyGreSQL as the PostgreSQL database adapter, but meanwhile it can be used for any Python application and any Python database adapter based on DB-API 2.

Modules

DBUtils has been implemented as a Python package consisting of two different groups of modules: one group for use with arbitrary DB-API 2 database adapters, and one group for use with the classic PyGreSQL database adapter module.

Universal variant for arbitrary DB-API 2 adapters

steady_db

Hardened DB-API 2 database connections

pooled_db

Pooling for DB-API 2 database connections

persistent_db

Persistent DB-API 2 database connections

simple_pooled_db

Simple pooling for DB-API 2

Variant specifically for the classic PyGreSQL adapter

steady_pg

Hardened classic PyGreSQL connections

pooled_pg

Pooling for classic PyGreSQL connections

persistent_pg

Persistent classic PyGreSQL connections

simple_pooled_pg

Simple pooling for classic PyGreSQL

The dependencies of the modules in the variant for arbitrary DB-API 2 adapters are shown in the following diagram:

dependencies_db.png

The dependencies of the modules in the variant for the classic PyGreSQL adapter look similar:

dependencies_pg.png

Download

The current version of DBUtils can be downloaded from the Python Package Index:

https://pypi.python.org/pypi/DBUtils

The source code repository can be found here on GitHub:

https://github.com/WebwareForPython/DBUtils

Installation

Installation

The package can be installed in the usual way:

python setup.py install

It is even easier to download and install the package automatically in one step using pip:

pip install DBUtils

Requirements

DBUtils supports Python versions 3.7 to 3.12.

The modules in the classic PyGreSQL variant need PyGreSQL version 4.0 or above, while the modules in the universal DB-API 2 variant work with any Python database adapter module that is based on DB-API 2.

Functionality

This section only uses the names of the DB-API 2 variant, but the same applies accordingly to the PyGreSQL variant.

DBUtils installs itself as the package dbutils, which contains all the modules described here. Each of these modules essentially contains one class with an analogous name that provides the respective functionality. For instance, the module dbutils.pooled_db contains the class PooledDB.
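For example, the pool class of that module is imported like this:

from dbutils.pooled_db import PooledDB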

SimplePooledDB (simple_pooled_db)

The class SimplePooledDB in dbutils.simple_pooled_db is a very basic reference implementation of a pool of database connections. By this we mean a cache of database connections from which the Python application can draw. This implementation is far less sophisticated than the regular pooled_db module and, in particular, provides no failover. dbutils.simple_pooled_db is essentially identical to the module MiscUtils.DBPool that is part of Webware for Python. It is intended more as a demonstration of the concept than for use in production.
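A minimal usage sketch (assuming pgdb as the DB-API 2 adapter; host, database, user and password are placeholders):

import pgdb  # any DB-API 2 compliant adapter module can be used
from dbutils.simple_pooled_db import PooledDB

# set up a pool of 5 cached connections
dbpool = PooledDB(pgdb, 5,
    host='localhost', database='mydb', user='scott', password='tiger')

db = dbpool.connection()  # take a connection from the pool
cursor = db.cursor()
cursor.execute('select 1')
print(cursor.fetchall())
cursor.close()
db.close()  # does not really close, but returns the connection to the pool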

SteadyDBConnection (steady_db)

The class SteadyDBConnection in the module dbutils.steady_db implements "hardened" database connections based on ordinary connections of a DB-API 2 database adapter. A "hardened" connection is transparently reopened upon access, without the application noticing, when it has been closed, when the database connection has been interrupted, or when it has been used more often than an optional usage limit.

A typical example where this is needed is when the database has been restarted while your application is still running and has open connections to the database, or when your application accesses a remote database over a network that is protected by a firewall, and the firewall has been restarted and lost its connection state.

Usually, you will not use the steady_db module directly; it merely serves as a basis for the next two modules, persistent_db and pooled_db.
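
Although you will normally not use steady_db directly, a minimal sketch of its direct usage might look like this (pgdb, the session command and the database name meinedb are assumptions):

import pgdb  # an arbitrary DB-API 2 module (assumed)
from dbutils.steady_db import connect

# reset the connection after 10000 uses and initialize each session
db = connect(pgdb, 10000, ["set datestyle to german"], database='meinedb')
cur = db.cursor()
cur.execute('select version()')  # transparently reopens a lost connection
cur.close()
db.close()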

PersistentDB (persistent_db)

The class PersistentDB in the module dbutils.persistent_db provides hardened, thread-affine, persistent database connections, using any DB-API 2 database adapter. "Thread-affine" and "persistent" means that the individual database connections stay firmly assigned to the respective threads and will not be closed during the lifetime of those threads.

The following diagram shows the connection layers involved when you are using persistent_db database connections:

persistent.png

Whenever a thread opens a database connection for the first time, a new connection to the database will be opened that will from then on be used for exactly this thread. When the thread closes the database connection, it is nevertheless kept open so that the next time the same thread requests a database connection, this same already opened connection can be used again. The connection will be closed automatically when the thread ends.

In short, persistent_db tries to recycle database connections in order to increase the overall database access performance of your threaded application, while making sure that different threads never use the same connection.

Therefore, persistent_db works smoothly even if the underlying DB-API 2 database adapter is not thread-safe at the connection level, or if parallel threads change database session parameters or perform transactions spanning more than one SQL command.

PooledDB (pooled_db)

The class PooledDB in the module dbutils.pooled_db provides, using any DB-API 2 database adapter, a pool of hardened, thread-safe database connections that are transparently reused, without the application noticing.

The following diagram shows the connection layers involved when you are using pooled_db database connections:

pooled.png

As the diagram indicates, pooled_db can assign opened database connections to the various threads arbitrarily. This happens by default if you set up the connection pool with a positive value for maxshared and the underlying DB-API 2 database adapter is thread-safe at the connection level, but you can also request dedicated database connections that will not be used by other threads. Besides the pool of shared database connections ("shared pool"), you can also set up a pool of at least mincached and at most maxcached idle connections held in reserve ("idle pool"), which is drawn upon whenever a thread requests a dedicated database connection, or when the pool of shared database connections is not yet full. When a thread closes a database connection that is no longer used by any other thread, it is returned to the idle pool so that it can be recycled.

If the underlying DB-API database adapter is not thread-safe, thread locks are used to ensure that the pooled_db connections are nevertheless thread-safe. So you don't need to worry about that, but you should be careful to use dedicated database connections whenever you change database session parameters or perform transactions spanning more than one SQL command.

Which one to use?

Both persistent_db and pooled_db serve the same purpose: to increase the efficiency of database access by reusing database connections, while at the same time guaranteeing stability even if the database connection is interrupted.

So which of the two modules should be used? From the above explanations it is clear that persistent_db makes more sense if your application uses a constant number of threads which frequently access the database. In this case, you will get roughly the same number of open database connections. However, if your application frequently ends and restarts threads, then pooled_db is the better solution, which also allows more fine-tuning to improve efficiency, especially when using a thread-safe DB-API 2 database adapter.

Since the interfaces of both modules are very similar, you can quite easily switch from one module to the other and test which one is more suitable.

Usage

The usage of all the modules is quite similar, but there are also some differences, particularly in the initialization, both between the "Pooled" and the "Persistent" variants and between the DB-API 2 and the PyGreSQL variants.

We will cover here only the persistent_db module and the somewhat more complex pooled_db module. For details on the other modules, see their docstrings. Using the Python interpreter console, you can display the documentation of the pooled_db module as follows (this works analogously for the other modules):

help(pooled_db)

PersistentDB (persistent_db)

In order to make use of the persistent_db module, you first need to set up a generator for your kind of database connections by creating an instance of persistent_db, passing the following parameters:

  • creator: either a function returning new DB-API 2 connections, or a DB-API 2 database adapter module

  • maxusage: the maximum number of reuses of a single connection (the default of 0 or None means unlimited reuse)

    As soon as this limit is reached, the connection will be reset.

  • setsession: an optional list of SQL commands to initialize the database session, e.g. ["set datestyle to german", ...]

  • failures: an optional exception class or a tuple of exception classes for which the connection failover mechanism shall be applied, if the default (OperationalError, InterfaceError, InternalError) is not adequate for the used database adapter module

  • ping: this parameter determines when connections are checked with the ping() method if such a method is available (0 = None = never, 1 = default = whenever it is requested, 2 = when a cursor is created, 4 = when a query is executed, 7 = always, and all bit combinations of these values)

  • closeable: if this is set to True, then closing connections will be allowed, but by default it will be silently ignored

  • threadlocal: an optional class for representing thread-local data that will be used instead of our Python implementation (threading.local is faster, but cannot be used in all cases)

  • The function specified as creator, or the connect function of the DB-API 2 database adapter module, will receive any additional parameters, such as host, database, user, password etc. You may set some or all of these parameters in your own creator function, allowing for sophisticated failover and load-balancing mechanisms.

For instance, if you are using pgdb as your DB-API 2 database adapter and want every connection to your local database meinedb to be reused 1000 times, the initialization looks like this:

import pgdb  # import the DB-API 2 module used
from dbutils.persistent_db import PersistentDB
persist = PersistentDB(pgdb, 1000, database='meinedb')

Once you have set up the generator with these parameters, you can from then on request database connections of that kind as follows:

db = persist.connection()

You can use these connections just as if they were ordinary DB-API 2 connections. More precisely, what you get is the "hardened" steady_db version of the underlying DB-API 2 connection.

Closing such a persistent connection with db.close() will be silently ignored, since it would be reopened at the next usage anyway, and that would be contrary to the intent of having persistent connections. Instead, the connection will be closed automatically when the thread ends. You can change this behavior by setting the parameter named closeable.

Note that transactions must be started explicitly by calling the begin() method. This ensures that the transparent reopening of connections will be suspended until the end of the transaction, and that the connection will be rolled back before being reused by the same thread.

Getting a connection can be made a bit faster by setting the parameter threadlocal to threading.local; however, this may not work in all environments (for instance, mod_wsgi is known to cause problems here, since it clears the data stored with threading.local between requests).
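
A minimal sketch of this (pgdb and the database name meinedb are assumptions as before):

import threading

import pgdb  # an arbitrary DB-API 2 module (assumed)
from dbutils.persistent_db import PersistentDB

# faster thread-local storage; known not to work e.g. under mod_wsgi
persist = PersistentDB(pgdb, 1000, threadlocal=threading.local,
                       database='meinedb')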

PooledDB (pooled_db)

In order to make use of the pooled_db module, you first need to set up a pool for your kind of database connections by creating an instance of pooled_db, passing the following parameters:

  • creator: either a function returning new DB-API 2 connections, or a DB-API 2 database adapter module

  • mincached: the initial number of idle connections in the pool (the default of 0 means no connections are opened at startup)

  • maxcached: the maximum number of idle connections in the pool (the default of 0 or None means unlimited pool size)

  • maxshared: the maximum number of shared connections allowed (the default of 0 or None means all connections are dedicated)

    When this maximum number is reached, connections are shared if they have been requested as shareable.

  • maxconnections: the maximum number of database connections generally allowed (the default of 0 or None means any number of connections)

  • blocking: determines the behavior when this maximum is exceeded

    If this is set to True, block and wait until the number of connections decreases again; by default, an error will be reported immediately.

  • maxusage: the maximum number of reuses of a single connection (the default of 0 or None means unlimited reuse)

    As soon as this limit is reached, the connection is automatically reset (closed and reopened).

  • setsession: an optional list of SQL commands to initialize the database session, e.g. ["set datestyle to german", ...]

  • reset: how connections should be reset before being returned to the pool (False or None to rollback transactions started with begin(), the default value True always issues a rollback for safety's sake)

  • failures: an optional exception class or a tuple of exception classes for which the connection failover mechanism shall be applied, if the default (OperationalError, InterfaceError, InternalError) is not adequate for the used database adapter module

  • ping: this parameter determines when connections are checked with the ping() method if such a method is available (0 = None = never, 1 = default = whenever fetched from the pool, 2 = when a cursor is created, 4 = when a query is executed, 7 = always, and all bit combinations of these values)

  • The function specified as creator, or the connect function of the DB-API 2 database adapter module, will receive any additional parameters, such as host, database, user, password etc. You may set some or all of these parameters in your own creator function, allowing for sophisticated failover and load-balancing mechanisms.

For instance, if you are using pgdb as your DB-API 2 database adapter and want a pool of at least five connections to your database meinedb, the initialization looks like this:

import pgdb  # import the DB-API 2 module used
from dbutils.pooled_db import PooledDB
pool = PooledDB(pgdb, 5, database='meinedb')

Once you have set up the connection pool this way, you can request connections from it as follows:

db = pool.connection()

You can use these connections just as if they were ordinary DB-API 2 connections. More precisely, what you get is the "hardened" steady_db version of the underlying DB-API 2 connection.

Please note that the connection may be shared with other threads if you have set the maxshared parameter to a value greater than zero and the underlying DB-API 2 database adapter allows this. A dedicated database connection that is guaranteed not to be shared with other threads is requested as follows:

db = pool.connection(shareable=False)

Instead of this, you can also get a dedicated connection as follows:

db = pool.dedicated_connection()

If you don't need the database connection anymore, you should immediately return it to the pool with db.close(). You can get a new connection in the same way.

Warning: In a threaded environment, never do the following:

pool.connection().cursor().execute(...)

This would release the database connection too early for reuse, which may have fatal consequences if the connections are not thread-safe. Make sure that the connection objects stay around as long as they are needed, like this:

db = pool.connection()
cur = db.cursor()
cur.execute(...)
res = cur.fetchone()
cur.close()  # or del cur
db.close()  # or del db

You can also simplify this by using context managers:

with pool.connection() as db:
    with db.cursor() as cur:
        cur.execute(...)
        res = cur.fetchone()

Note that transactions must be started explicitly by calling the begin() method. This ensures that the connection will not be shared with other threads anymore, that the transparent reopening of connections will be suspended until the end of the transaction, and that the connection will be rolled back before being returned to the connection pool.
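
A minimal sketch of such an explicit transaction (the table name mytable and its values are assumptions):

db = pool.connection()
try:
    db.begin()  # stops sharing and transparent reopening for this connection
    cur = db.cursor()
    cur.execute("insert into mytable values (1)")
    cur.execute("insert into mytable values (2)")
    cur.close()
    db.commit()  # both inserts become permanent together
except Exception:
    db.rollback()  # undo the unfinished transaction
    raise
finally:
    db.close()  # return the connection to the pool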

Advanced Usage

Sometimes you may want to prepare database connections in a special way before they are used by DBUtils, and this is not always possible by passing the appropriate parameters. For example, pyodbc may require that you call the setencoding() method of the database connection. You can achieve this by using a modified version of the connect() function and passing it as creator (the first argument) to PersistentDB or PooledDB, like this:

import pyodbc
from dbutils.pooled_db import PooledDB

def creator():
    con = pyodbc.connect(...)
    con.setencoding(...)
    return con

creator.dbapi = pyodbc

db_pool = PooledDB(creator, mincached=5)

Notes

If you are using one of the popular object-relational mappers SQLObject or SQLAlchemy, you do not need DBUtils, since they come with their own mechanisms for pooling database connections. Actually, SQLObject 2 (SQL-API) has out-sourced connection pooling into a separate layer that uses code from DBUtils.

If you are using a solution such as the Apache webserver with mod_python or mod_wsgi, you should be aware that your Python code normally runs in the context of the webserver's child processes. So if you are using the pooled_db module and several of these child processes are running, you will have as many pools of database connections. If these processes run many threads, this may be a reasonable approach, but if these processes don't spawn more than one worker thread, as in the case of the "prefork" multi-processing module for the Apache webserver, then you should resort to a middleware for connection pooling that supports multi-processing, such as pgpool or pgbouncer for the PostgreSQL database.

Future

Some ideas for future improvements:

  • As an alternative to the maximum number of uses of a database connection, a maximum lifetime for connections could be implemented.

  • Modules monitor_db and monitor_pg could be added that continuously monitor the "idle pool" and maybe also the "shared pool" or the persistent connections in a separate thread. When an interrupted database connection is detected, it is automatically restored by the monitor thread. This is useful in a scenario where the database of a website is restarted every night. Without the monitor thread, users would notice a slight delay in the morning, because only then would the interrupted database connections be detected and the pool slowly rebuilt. With the monitor thread, this would already happen during the night, shortly after the interruption. The monitor thread could also be configured to renew the whole connection pool every day, shortly before the users arrive.

  • Optionally, usage, bad connections and exceeding of limits could be logged.

Bug reports and feedback

You can submit bug reports, patches and feedback as Issues or Pull Requests on the GitHub project page of DBUtils.

Authors

Author:

Christoph Zwerschke

Contributions:

DBUtils uses code, notes and suggestions from Ian Bicking, Chuck Esterbrook (Webware for Python), Dan Green (DBTools), Jay Love, Michael Palmer, Tom Schwaller, Geoffrey Talvola, Warren Smith (DbConnectionPool), Ezio Vernacotola, Jehiah Czebotar, Matthew Harriger, Gregory Piñero and Josef van Eenbergen.

WebwareForPython-DBUtils-ed2a1f2/docs/main.de.rst000066400000000000000000000643751457556542700220110ustar00rootroot00000000000000Benutzeranleitung für DBUtils +++++++++++++++++++++++++++++ :Version: 3.1.0 :Translations: English_ | German .. _English: main.html .. contents:: Inhalt Zusammenfassung =============== DBUtils_ ist eine Sammlung von Python-Modulen, mit deren Hilfe man in Python_ geschriebene Multithread-Anwendungen auf sichere und effiziente Weise an Datenbanken anbinden kann. DBUtils wurde ursprünglich speziell für `Webware for Python`_ als Anwendung und PyGreSQL_ als PostgreSQL_-Datenbankadapter entwickelt, kann aber inzwischen für beliebige Python-Anwendungen und beliebige auf `DB-API 2`_ beruhende Python-Datenbankadapter verwendet werden. Module ====== DBUtils ist als Python-Package realisiert worden, das aus zwei verschiedenen Gruppen von Modulen besteht: Einer Gruppe zur Verwendung mit beliebigen DB-API-2-Datenbankadaptern, und einer Gruppe zur Verwendung mit dem klassischen PyGreSQL-Datenbankadapter-Modul. +------------------+----------------------------------------------+ | Allgemeine Variante für beliebige DB-API-2-Adapter | +==================+==============================================+ | steady_db | Gehärtete DB-API-2-Datenbankverbindungen | +------------------+----------------------------------------------+ | pooled_db | Pooling für DB-API-2-Datenbankverbindungen | +------------------+----------------------------------------------+ | persistent_db | Persistente DB-API-2-Datenbankverbindungen | +------------------+----------------------------------------------+ | simple_pooled_db | Einfaches Pooling für DB-API 2 | +------------------+----------------------------------------------+ +------------------+----------------------------------------------+ | Variante speziell für den klassischen PyGreSQL-Adapter | +==================+==============================================+ | steady_pg | Gehärtete klassische PyGreSQL-Verbindungen | +------------------+----------------------------------------------+ | pooled_pg | Pooling für klassische PyGreSQL-Verbindungen | +------------------+----------------------------------------------+ | persistent_pg | Persistente klassische PyGreSQL-Verbindungen | +------------------+----------------------------------------------+ | simple_pooled_pg | Einfaches Pooling für klassisches PyGreSQL | +------------------+----------------------------------------------+ Die Abhängigkeiten der Module in der Variante für beliebige DB-API-2-Adapter sind im folgenden Diagramm dargestellt: .. image:: dependencies_db.png Die Abhängigkeiten der Module in der Variante für den klassischen PyGreSQL-Adapter sehen ähnlich aus: .. image:: depdependencies_pg.png Download ======== Die aktuelle Version von DBUtils kann vom Python Package Index heruntergeladen werden:: https://pypi.python.org/pypi/DBUtils Das Source-Code-Repository befindet sich hier auf GitHub:: https://github.com/WebwareForPython/DBUtils Installation ============ Installation ------------ Das Paket kann auf die übliche Weise installiert werden:: python setup.py install Noch einfacher ist, das Paket in einem Schritt mit `pip`_ automatisch herunterzuladen und zu installieren:: pip install DBUtils .. _pip: https://pip.pypa.io/ Anforderungen ============= DBUtils unterstützt die Python_ Versionen 3.7 bis 3.12. 
Die Module in der Variante für klassisches PyGreSQL benötigen PyGreSQL_ Version 4.0 oder höher, während die Module in der allgemeinen Variante für DB-API 2 mit jedem beliebigen Python-Datenbankadapter-Modul zusammenarbeiten, das auf `DB-API 2`_ basiert. Funktionalität ============== Dieser Abschnitt verwendet nur die Bezeichnungen der DB-API-2-Variante, aber Entsprechendes gilt auch für die PyGreSQL-Variante. DBUtils installiert sich als Paket ``dbutils``, das alle hier beschriebenen Module enthält. Jedes dieser Modul enthält im Wesentlichen eine Klasse, die einen analogen Namen trägt und die jeweilige Funktionalität bereitstellt. So enthält z.B. das Modul ``dbutils.pooled_db`` die Klasse ``PooledDB``. SimplePooledDB (simple_pooled_db) --------------------------------- Die Klasse ``SimplePooledDB`` in ``dbutils.simple_pooled_db`` ist eine sehr elementare Referenz-Implementierung eines Pools von Datenbankverbindungen. Hiermit ist ein Vorratsspeicher an Datenbankverbindungen gemeint, aus dem sich die Python-Anwendung bedienen kann. Diese Implementierung ist weit weniger ausgefeilt als das eigentliche ``pooled_db``-Modul und stellt insbesondere keine Ausfallsicherung zur Verfügung. ``dbutils.simple_pooled_db`` ist im Wesentlichen identisch mit dem zu Webware for Python gehörenden Modul ``MiscUtils.DBPool``. Es ist eher zur Verdeutlichung des Konzepts gedacht, als zum Einsatz im produktiven Betrieb. SteadyDBConnection (steady_db) ------------------------------ Die Klasse ``SteadyDBConnection`` im Modul ``dbutils.steady_db`` stellt "gehärtete" Datenbankverbindungen bereit, denen gewöhnlichen Verbindungen eines DB-API-2-Datenbankadapters zugrunde liegen. Eine "gehärtete" Verbindung wird bei Zugriff automatisch, ohne dass die Anwendung dies bemerkt, wieder geöffnet, wenn sie geschlossen wurde, die Datenbankverbindung unterbrochen wurde, oder wenn sie öfter als ein optionales Limit genutzt wurde. Ein typisches Beispiel wo dies benötig wird, ist, wenn die Datenbank neu gestartet wurde, während Ihre Anwendung immer noch läuft und Verbindungen zur Datenbank offen hat, oder wenn Ihre Anwendung auf eine entfernte Datenbank über ein Netzwerk zugreift, das durch eine Firewall geschützt ist, und die Firewall neu gestartet wurde und dabei ihren Verbindungsstatus verloren hat. Normalerweise benutzen Sie das ``steady_db``-Modul nicht direkt; es wird aber von den beiden nächsten Modulen benötigt, ``persistent_db`` und ``pooled_db``. PersistentDB (persistent_db) ---------------------------- Die Klasse ``PersistentDB`` im Modul ``dbutils.persistent_db`` stellt gehärtete, thread-affine, persistente Datenbankverbindungen zur Verfügung, unter Benutzung eines beliebigen DB-API-2-Datenbankadapters. Mit "thread-affin" und "persistent" ist hierbei gemeint, dass die einzelnen Datenbankverbindungen den jeweiligen Threads fest zugeordnet bleiben und während der Laufzeit des Threads nicht geschlossen werden. Das folgende Diagramm zeigt die beteiligten Verbindungsschichten, wenn Sie ``persistent_db``-Datenbankverbindungen einsetzen: .. image:: persistent.png Immer wenn ein Thread eine Datenbankverbindung zum ersten Mal öffnet, wird eine neue Datenbankverbindung geöffnet, die von da an immer wieder für genau diesen Thread verwendet wird. Wenn der Thread die Datenbankverbindung schließt, wird sie trotzdem weiter offen gehalten, damit beim nächsten Mal, wenn der gleiche Thread wieder eine Datenbankverbindung anfordert, diese gleiche bereits geöffnete Datenbankverbindung wieder verwendet werden kann. 
Die Verbindung wird automatisch geschlossen, wenn der Thread beendet wird. Kurz gesagt versucht ``persistent_db`` Datenbankverbindungen wiederzuverwerten, um die Gesamteffizienz der Datenbankzugriffe Ihrer Multithread-Anwendungen zu steigern, aber es wird dabei sichergestellt, dass verschiedene Threads niemals die gleiche Verbindung benutzen. Daher arbeitet ``persistent_db`` sogar dann problemlos, wenn der zugrunde liegende DB-API-2-Datenbankadapter nicht thread-sicher auf der Verbindungsebene ist, oder wenn parallele Threads Parameter der Datenbank-Sitzung verändern oder Transaktionen mit mehreren SQL-Befehlen durchführen. PooledDB (pooled_db) -------------------- Die Klasse ``PooledDB`` im Modul ``dbutils.pooled_db`` stellt, unter Benutzung eines beliebigen DB-API-2-Datenbankadapters, einen Pool von gehärteten, thread-sicheren Datenbankverbindungen zur Verfügung, die automatisch, ohne dass die Anwendung dies bemerkt, wiederverwendet werden. Das folgende Diagramm zeigt die beteiligten Verbindungsschichten, wenn Sie ``pooled_db``-Datenbankverbindungen einsetzen: .. image:: pooled.png Wie im Diagramm angedeutet, kann ``pooled_db`` geöffnete Datenbankverbindungen den verschiedenen Threads beliebig zuteilen. Dies geschieht standardmäßig, wenn Sie den Verbindungspool mit einem positiven Wert für ``maxshared`` einrichten und der zugrunde liegende DB-API-2-Datenbankadapter auf der Verbindungsebene thread-sicher ist, aber sie können auch dedizierte Datenbankverbindungen anfordern, die nicht von anderen Threads verwendet werden sollen. Neben dem Pool gemeinsam genutzter Datenbankverbindungen ("shared pool") können Sie auch einen Pool von mindestens ``mincached`` und höchstens ``maxcached`` inaktiven Verbindungen auf Vorrat einrichten ("idle pool"), aus dem immer dann geschöpft wird, wenn ein Thread eine dedizierte Datenbankverbindung anfordert, oder wenn der Pool gemeinsam genutzter Datenbankverbindungen noch nicht voll ist. Wenn ein Thread eine Datenbankverbindung schließt, die auch von keinem anderen Thread mehr benutzt wird, wird sie an den Vorratsspeicher inaktiver Datenbankverbindungen zurückgegeben, damit sie wiederverwertet werden kann. Wenn der zugrunde liegende DB-API-Datenbankadapter nicht thread-sicher ist, werden Thread-Locks verwendet, um sicherzustellen, dass die ``pooled_db``-Verbindungen dennoch thread-sicher sind. Sie brauchen sich also hierum keine Sorgen zu machen, aber Sie sollten darauf achten, dedizierte Datenbankverbindungen zu verwenden, sobald Sie Parameter der Datenbanksitzung verändern oder Transaktionen mit mehreren SQL-Befehlen ausführen. Die Qual der Wahl ----------------- Sowohl ``persistent_db`` als auch ``pooled_db`` dienen dem gleichen Zweck, nämlich die Effizienz des Datenbankzugriffs durch Wiederverwendung von Datenbankverbindungen zu steigern, und dabei gleichzeitig die Stabilität zu gewährleisten, selbst wenn die Datenbankverbindung unterbrochen wird. Welches der beiden Module sollte also verwendet werden? Nach den obigen Erklärungen ist es klar, dass ``persistent_db`` dann sinnvoller ist, wenn Ihre Anwendung eine gleich bleibende Anzahl Threads verwendet, die häufig auf die Datenbank zugreifen. In diesem Fall werden Sie ungefähr die gleiche Anzahl geöffneter Datenbankverbindungen erhalten. 
Wenn jedoch Ihre Anwendung häufig Threads beendet und neu startet, dann ist ``pooled_db`` die bessere Lösung, die auch mehr Möglichkeiten zur Feineinstellung zur Verbesserung der Effizienz erlaubt, insbesondere bei Verwendung eines thread-sicheren DB-API-2-Datenbankadapters. Da die Schnittstellen beider Module sehr ähnlich sind, können Sie recht einfach von einem Modul zum anderen wechseln und austesten, welches geeigneter ist. Benutzung ========= Die Benutzung aller Module ist zwar recht ähnlich, aber es gibt vor allem bei der Initialisierung auch einige Unterschiede, sowohl zwischen den "Pooled"- und den "Persistent"-Varianten, als auch zwischen den DB-API-2- und den PyGreSQL-Varianten. Wir werden hier nur auf das ``persistent_db``-Modul und das etwas kompliziertere ``pooled_db``-Modul eingehen. Einzelheiten zu den anderen Modulen finden Sie in deren Docstrings. Unter Verwendung der Python-Interpreter-Konsole können Sie sich die Dokumentation des ``pooled_db``-Moduls wie folgt anzeigen lassen (dies funktioniert entsprechend auch mit den anderen Modulen):: help(pooled_db) PersistentDB (persistent_db) ---------------------------- Wenn Sie das ``persistent_db``-Modul einsetzen möchten, müssen Sie zuerst einen Generator für die von Ihnen gewünschte Art von Datenbankverbindungen einrichten, indem Sie eine Instanz der Klasse ``persistent_db`` erzeugen, wobei Sie folgende Parameter angeben müssen: * ``creator``: entweder eine Funktion, die neue DB-API-2-Verbindungen erzeugt, oder ein DB-API-2-Datenbankadapter-Modul * ``maxusage``: Obergrenze dafür, wie oft eine einzelne Verbindung wiederverwendet werden darf (der Standardwert ``0`` oder ``None`` bedeutet unbegrenzte Wiederverwendung) Sobald diese Obergrenze erreicht wird, wird die Verbindung zurückgesetzt. * ``setsession``: eine optionale Liste von SQL-Befehlen zur Initialisierung der Datenbanksitzung, z.B. ``["set datestyle to german", ...]`` * ``failures``: eine optionale Exception-Klasse oder ein Tupel von Exceptions, bei denen die Ausfallsicherung zum Tragen kommen soll, falls die Vorgabe (OperationalError, InterfaceError, InternalError) für das verwendete Datenbankadapter-Modul nicht geeignet sein sollte * ``ping``: mit diesem Parameter kann eingestellt werden, wann Verbindungen mit der ``ping()``-Methode geprüft werden, falls eine solche vorhanden ist (``0`` = ``None`` = nie, ``1`` = Standardwert = immer wenn neu angefragt, ``2`` = vor Erzeugen eines Cursors, ``4`` = vor dem Ausführen von Abfragen, ``7`` = immer, und alle Bitkombinationen dieser Werte) * ``closeable``: wenn dies auf ``True`` gesetzt wird, dann wird das Schließen von Verbindungen erlaubt, normalerweise wird es jedoch ignoriert * ``threadlocal``: eine optionale Klasse zur Speicherung thread-lokaler Daten, die anstelle unserer Python-Implementierung benutzt wird (threading.local ist schneller, kann aber nicht in allen Fällen verwendet werden) * Die als ``creator`` angegebene Funktion oder die Funktion ``connect`` des DB-API-2-Datenbankadapter-Moduls erhalten alle weiteren Parameter, wie ``host``, ``database``, ``user``, ``password`` usw. Sie können einige oder alle dieser Parameter in Ihrer eigenen ``creator``-Funktion setzen, was ausgefeilte Mechanismen zur Ausfallsicherung und Lastverteilung ermöglicht. 
Wenn Sie beispielsweise ``pgdb`` als DB-API-2-Datenbankadapter verwenden, und möchten, dass jede Verbindung Ihrer lokalen Datenbank ``meinedb`` 1000 mal wiederverwendet werden soll, sieht die Initialisierung so aus:: import pgdb # importiere das verwendete DB-API-2-Modul from dbutils.persistent_db import PersistentDB persist = PersistentDB(pgdb, 1000, database='meinedb') Nachdem Sie den Generator mit diesen Parametern eingerichtet haben, können Sie derartige Datenbankverbindungen von da an wie folgt anfordern:: db = persist.connection() Sie können diese Verbindungen verwenden, als wären sie gewöhnliche DB-API-2-Datenbankverbindungen. Genauer genommen erhalten Sie die "gehärtete" ``steady_db``-Version der zugrunde liegenden DB-API-2-Verbindung. Wenn Sie eine solche persistente Verbindung mit ``db.close()`` schließen, wird dies stillschweigend ignoriert, denn sie würde beim nächsten Zugriff sowieso wieder geöffnet, und das wäre nicht im Sinne persistenter Verbindungen. Stattdessen wird die Verbindung automatisch dann geschlossen, wenn der Thread endet. Sie können dieses Verhalten ändern, indem Sie den Parameter namens ``closeable`` setzen. Bitte beachten Sie, dass Transaktionen explizit durch Aufruf der Methode ``begin()`` eingeleitet werden müssen. Hierdurch wird sichergestellt, dass das transparente Neueröffnen von Verbindungen bis zum Ende der Transaktion ausgesetzt wird, und dass die Verbindung zurückgerollt wird, before sie vom gleichen Thread erneut benutzt wird. Das Holen einer Verbindung kann etwas beschleunigt werden, indem man den Parameter ``threadlocal`` auf ``threading.local`` setzt; dies könnte aber in einigen Umgebungen nicht funktionieren (es ist zum Beispiel bekannt, dass ``mod_wsgi`` hier Probleme bereitet, da es Daten, die mit ``threading.local`` gespeichert wurden, zwischen Requests löscht). PooledDB (pooled_db) -------------------- Wenn Sie das ``pooled_db``-Modul einsetzen möchten, müssen Sie zuerst einen Pool für die von Ihnen gewünschte Art von Datenbankverbindungen einrichten, indem Sie eine Instanz der Klasse ``pooled_db`` erzeugen, wobei Sie folgende Parameter angeben müssen: * ``creator``: entweder eine Funktion, die neue DB-API-2-Verbindungen erzeugt, oder ein DB-API-2-Datenbankadapter-Modul * ``mincached`` : die anfängliche Anzahl inaktiver Verbindungen, die auf Vorrat gehalten werden sollen (der Standardwert ``0`` bedeutet, dass beim Start keine Verbindungen geöffnet werden) * ``maxcached``: Obergrenze für die Anzahl inaktiver Verbindungen, die auf Vorrat gehalten werden sollen (der Standardwert ``0`` oder ``None`` bedeutet unbegrenzte Größe des Vorratsspeichers) * ``maxshared``: Obergrenze für die Anzahl gemeinsam genutzer Verbindungen (der Standardwert ``0`` oder ``None`` bedeutet, dass alle Verbindungen dediziert sind) Wenn diese Obergrenze erreicht wird, werden Verbindungen wiederverwendet, wenn diese als wiederverwendbar angefordert werden. * ``maxconnections``: Obergrenze für die Anzahl an Datenbankverbindungen, die insgesamt überhaupt erlaubt werden sollen (der Standardwert ``0`` oder ``None`` bedeutet unbegrenzte Anzahl von Datenbankverbindungen) * ``blocking``: bestimmt das Verhalten bei Überschreitung dieser Obergrenze Wenn dies auf ``True`` gesetzt wird, dann wird so lange gewartet, bis die Anzahl an Datenbankverbindungen wieder abnimmt, normalerweise wird jedoch sofort eine Fehlermeldung ausgegeben. 
* ``maxusage``: Obergrenze dafür, wie oft eine einzelne Verbindung wiederverwendet werden darf (der Standardwert ``0`` oder ``None`` bedeutet unbegrenzte Wiederverwendung) Sobald diese Obergrenze erreicht wird, wird die Verbindung automatisch zurückgesetzt (geschlossen und wieder neu geöffnet). * ``setsession``: eine optionale Liste von SQL-Befehlen zur Initialisierung der Datenbanksitzung, z.B. ``["set datestyle to german", ...]`` * ``reset``: wie Verbindungen zurückgesetzt werden sollen, bevor sie wieder in den Verbindungspool zurückgegeben werden (``False`` oder ``None`` um mit ``begin()`` gestartete Transaktionen zurückzurollen, der Standardwert ``True`` rollt sicherheitshalber mögliche Transaktionen immer zurück) * ``failures``: eine optionale Exception-Klasse oder ein Tupel von Exceptions, bei denen die Ausfallsicherung zum Tragen kommen soll, falls die Vorgabe (OperationalError, InterfaceError, InternalError) für das verwendete Datenbankadapter-Modul nicht geeignet sein sollte * ``ping``: mit diesem Parameter kann eingestellt werden, wann Verbindungen mit der ``ping()``-Methode geprüft werden, falls eine solche vorhanden ist (``0`` = ``None`` = nie, ``1`` = Standardwert = immer wenn neu angefragt, ``2`` = vor Erzeugen eines Cursors, ``4`` = vor dem Ausführen von Abfragen, ``7`` = immer, und alle Bitkombinationen dieser Werte) * Die als ``creator`` angegebene Funktion oder die Funktion ``connect`` des DB-API-2-Datenbankadapter-Moduls erhalten alle weiteren Parameter, wie ``host``, ``database``, ``user``, ``password`` usw. Sie können einige oder alle dieser Parameter in Ihrer eigenen ``creator``-Funktion setzen, was ausgefeilte Mechanismen zur Ausfallsicherung und Lastverteilung ermöglicht. Wenn Sie beispielsweise ``pgdb`` als DB-API-2-Datenbankadapter benutzen, und einen Pool von mindestens fünf Datenbankverbindungen zu Ihrer Datenbank ``meinedb`` verwenden möchten, dann sieht die Initialisierung so aus:: import pgdb # importiere das verwendete DB-API-2-Modul from dbutils.pooled_db import PooledDB pool = PooledDB(pgdb, 5, database='meinedb') Nachdem Sie den Pool für Datenbankverbindungen so eingerichtet haben, können Sie Verbindungen daraus wie folgt anfordern:: db = pool.connection() Sie können diese Verbindungen verwenden, als wären sie gewöhnliche DB-API-2-Datenbankverbindungen. Genauer genommen erhalten Sie die "gehärtete" ``steady_db``-Version der zugrunde liegenden DB-API-2-Verbindung. Bitte beachten Sie, dass die Verbindung von anderen Threads mitgenutzt werden kann, wenn Sie den Parameter ``maxshared`` auf einen Wert größer als Null gesetzt haben, und der zugrunde liegende DB-API-2-Datenbankadapter dies erlaubt. Eine dedizierte Datenbankverbindung, die garantiert nicht von anderen Threads mitgenutzt wird, fordern Sie wie folgt an:: db = pool.connection(shareable=False) Stattdessen können Sie eine dedizierte Verbindung auch wie folgt erhalten:: db = pool.dedicated_connection() Wenn Sie die Datenbankverbindung nicht mehr benötigen, sollten Sie diese sofort wieder mit ``db.close()`` an den Pool zurückgeben. Sie können auf die gleiche Weise eine neue Verbindung erhalten. *Warnung:* In einer Multithread-Umgebung benutzen Sie niemals:: pool.connection().cursor().execute(...) Dies würde die Datenbankverbindung zu früh zur Wiederverwendung zurückgeben, was fatale Folgen haben könnte, wenn die Verbindungen nicht thread-sicher sind. 
Stellen Sie sicher, dass die Verbindungsobjekte so lange vorhanden sind, wie sie gebraucht werden, etwa so:: db = pool.connection() cur = db.cursor() cur.execute(...) res = cur.fetchone() cur.close() # oder del cur db.close() # oder del db Sie können dies auch durch Verwendung von Kontext-Managern vereinfachen:: with pool.connection() as db: with db.cursor() as cur: cur.execute(...) res = cur.fetchone() Bitte beachten Sie, dass Transaktionen explizit durch Aufruf der Methode ``begin()`` eingeleitet werden müssen. Hierdurch wird sichergestellt, dass die Verbindung nicht mehr mit anderen Threads geteilt wird, dass das transparente Neueröffnen von Verbindungen bis zum Ende der Transaktion ausgesetzt wird, und dass die Verbindung zurückgerollt wird, bevor sie wieder an den Verbindungspool zurückgegeben wird. Besonderheiten bei der Benutzung ================================ Manchmal möchte man Datenbankverbindung besonders vorbereiten, bevor sie von DBUtils verwendet werden, und dies ist nicht immer durch Verwendung der passenden Parameter möglich. Zum Beispiel kann es ``pyodb`` erfordern, dass man die Methode ``setencoding()`` der Datenbankverbindung aufruft. Sie können dies erreichen, indem Sie eine modifizierte Version der Funktion ``connect()`` verwenden und diese als ``creator`` (dem ersten Argument) an ``PersistentDB`` oder ``PooledDB`` übergeben, etwa so:: from pyodbc import connect from dbutils.pooled_db import PooledDB def creator(): con = connect(...) con.setdecoding(...) return con creator.dbapi = pyodbc db_pool = PooledDB(creator, mincached=5) Anmerkungen =========== Wenn Sie einen der bekannten "Object-Relational Mapper" SQLObject_ oder SQLAlchemy_ verwenden, dann benötigen Sie DBUtils nicht, denn diese haben ihre eigenen Mechanismen zum Pooling von Datenbankverbindungen eingebaut. Tatsächlich hat SQLObject 2 (SQL-API) das Pooling in eine separate Schicht ausgelagert, in der Code von DBUtils verwendet wird. Wenn Sie eine Lösung verwenden wie den Apache-Webserver mit mod_python_ oder mod_wsgi_, dann sollten Sie bedenken, dass Ihr Python-Code normalerweise im Kontext der Kindprozesse des Webservers läuft. Wenn Sie also das ``pooled_db``-Modul einsetzen, und mehrere dieser Kindprozesse laufen, dann werden Sie ebenso viele Pools mit Datenbankverbindungen erhalten. Wenn diese Prozesse viele Threads laufen lassen, dann mag dies eine sinnvoller Ansatz sein, wenn aber diese Prozesse nicht mehr als einen Worker-Thread starten, wie im Fall des Multi-Processing Moduls "prefork" für den Apache-Webserver, dann sollten Sie auf eine Middleware für das Connection-Pooling zurückgreifen, die Multi-Processing unterstützt, wie zum Beispiel pgpool_ oder pgbouncer_ für die PostgreSQL-Datenbank. Zukunft ======= Einige Ideen für zukünftige Verbesserungen: * Alternativ zur Obergrenze in der Anzahl der Nutzung einer Datenbankverbindung könnte eine maximale Lebensdauer für die Verbindung implementiert werden. * Es könnten Module ``monitor_db`` und ``monitor_pg`` hinzugefügt werden, die in einem separaten Thread ständig den "idle pool" und eventuell auch den "shared pool" bzw. die persistenten Verbindungen überwachen. Wenn eine unterbrochene Datenbankverbindung entdeckt wird, wird diese automatisch durch den Monitor-Thread wiederhergestellt. Dies ist in einem Szenario sinnvoll, bei dem die Datenbank einer Website jede Nacht neu gestartet wird. 
Ohne den Monitor-Thread würden die Benutzer morgens eine kleine Verzögerung bemerken, weil erst dann die unterbrochenen Datenbankverbindungen entdeckt würden und sich der Pool langsam wieder neu aufbaut. Mit dem Monitor-Thread würde dies schon während der Nacht passieren, kurz nach der Unterbrechung. Der Monitor-Thread könnte auch so konfiguriert werden, dass er überhaupt täglich den Verbindungspool erneuert, kurz bevor die Benutzer erscheinen. * Optional sollten Benutzung, schlechte Verbindungen und Überschreitung von Obergrenzen in Logs gespeichert werden können. Fehlermeldungen und Feedback ============================ Fehlermeldungen, Patches und Feedback können Sie als Issues_ oder `Pull Requests`_ auf der `GitHub-Projektseite`_ von DBUtils übermitteln. .. _GitHub-Projektseite: https://github.com/WebwareForPython/DBUtils .. _Issues: https://github.com/WebwareForPython/DBUtils/issues .. _Pull Requests: https://github.com/WebwareForPython/DBUtils/pulls Links ===== Einige Links zu verwandter und alternativer Software: * DBUtils_ * Python_ * `Webware for Python`_ Framework * Python `DB-API 2`_ * PostgreSQL_ Datenbank * PyGreSQL_ Python-Adapter for PostgreSQL * pgpool_ Middleware für Connection-Pooling mit PostgreSQL * pgbouncer_ Middleware für Connection-Pooling mit PostgreSQL * SQLObject_ Objekt-relationaler Mapper * SQLAlchemy_ Objekt-relationaler Mapper .. _DBUtils: https://github.com/WebwareForPython/DBUtils .. _Python: https://www.python.org .. _Webware for Python: https://webwareforpython.github.io/w4py/ .. _DB-API 2: https://www.python.org/dev/peps/pep-0249/ .. _The Python DB-API: http://www.linuxjournal.com/article/2605 .. _PostgresQL: https://www.postgresql.org/ .. _PyGreSQL: https://www.pygresql.org/ .. _SQLObject: http://sqlobject.org/ .. _SQLAlchemy: https://www.sqlalchemy.org .. _Apache: https://httpd.apache.org/ .. _mod_python: http://modpython.org/ .. _mod_wsgi: https://github.com/GrahamDumpleton/mod_wsgi .. _pgpool: https://www.pgpool.net/ .. _pgbouncer: https://pgbouncer.github.io/ Autoren ======= :Autor: `Christoph Zwerschke`_ :Beiträge: DBUtils benutzt Code, Anmerkungen und Vorschläge von Ian Bicking, Chuck Esterbrook (Webware for Python), Dan Green (DBTools), Jay Love, Michael Palmer, Tom Schwaller, Geoffrey Talvola, Warren Smith (DbConnectionPool), Ezio Vernacotola, Jehiah Czebotar, Matthew Harriger, Gregory Piñero und Josef van Eenbergen. .. _Christoph Zwerschke: https://github.com/Cito Copyright und Lizenz ==================== Copyright © 2005-2024 Christoph Zwerschke. Alle Rechte vorbehalten. DBUtils ist freie und quelloffene Software, lizenziert unter der `MIT-Lizenz`__. __ https://opensource.org/licenses/MIT WebwareForPython-DBUtils-ed2a1f2/docs/main.html000066400000000000000000001001351457556542700215370ustar00rootroot00000000000000 DBUtils User's Guide

DBUtils User's Guide

Version:
3.1.0
Translations:

English | German

Synopsis

DBUtils is a suite of Python modules that allows threaded Python applications to connect to a database in a safe and efficient way.

DBUtils was originally written particularly for Webware for Python as the application and PyGreSQL as the adapter to a PostgreSQL database, but it can now be used for any other Python application and any DB-API 2 conformant database adapter.

Modules

The DBUtils suite is realized as a Python package containing two subsets of modules, one for use with arbitrary DB-API 2 modules, the other for use with the classic PyGreSQL module.

Universal DB-API 2 variant

steady_db

Hardened DB-API 2 connections

pooled_db

Pooling for DB-API 2 connections

persistent_db

Persistent DB-API 2 connections

simple_pooled_db

Simple pooling for DB-API 2

Classic PyGreSQL variant

steady_pg

Hardened classic PyGreSQL connections

pooled_pg

Pooling for classic PyGreSQL connections

persistent_pg

Persistent classic PyGreSQL connections

simple_pooled_pg

Simple pooling for classic PyGreSQL

The dependencies of the modules in the universal DB-API 2 variant are as indicated in the following diagram:

dependencies_db.png

The dependencies of the modules in the classic PyGreSQL variant are similar:

dependencies_pg.png

Download

You can download the current version of DBUtils from the Python Package Index at:

https://pypi.python.org/pypi/DBUtils

The source code repository can be found here on GitHub:

https://github.com/WebwareForPython/DBUtils

Installation

Installation

The package can be installed in the usual way:

python setup.py install

It is even easier to download and install the package in one go using pip:

pip install DBUtils

Requirements

DBUtils supports Python versions 3.7 to 3.12.

The modules in the classic PyGreSQL variant need PyGreSQL version 4.0 or above, while the modules in the universal DB-API 2 variant run with any Python DB-API 2 compliant database interface module.

Functionality

This section will refer to the names in the DB-API 2 variant only, but the same applies to the classic PyGreSQL variant.

DBUtils installs itself as a package dbutils containing all the modules that are described in this guide. Each of these modules contains essentially one class with an analogous name that provides the corresponding functionality. For instance, the module dbutils.pooled_db contains the class PooledDB.

SimplePooledDB (simple_pooled_db)

The class SimplePooledDB in dbutils.simple_pooled_db is a very basic reference implementation of a pooled database connection. It is much less sophisticated than the regular pooled_db module and is particularly lacking the failover functionality. dbutils.simple_pooled_db is essentially the same as the MiscUtils.DBPool module that is part of Webware for Python. You should consider it a demonstration of the concept rather than something that should go into production.

SteadyDBConnection (steady_db)

The class SteadyDBConnection in the module dbutils.steady_db implements "hardened" connections to a database, based on ordinary connections made by any DB-API 2 database module. A "hardened" connection will transparently reopen upon access when it has been closed or the database connection has been lost or when it is used more often than an optional usage limit.

A typical example where this is needed is when the database has been restarted while your application is still running and has open connections to the database, or when your application accesses a remote database in a network that is separated by a firewall and the firewall has been restarted and lost its state.

Usually, you will not use the steady_db module directly; it merely serves as a basis for the next two modules, persistent_db and pooled_db.

PersistentDB (persistent_db)

The class PersistentDB in the module dbutils.persistent_db implements steady, thread-affine, persistent connections to a database, using any DB-API 2 database module. "Thread-affine" and "persistent" means that the individual database connections stay assigned to the respective threads and will not be closed during the lifetime of the threads.

The following diagram shows the connection layers involved when you are using persistent_db connections:

persistent.png

Whenever a thread opens a database connection for the first time, a new connection to the database will be opened that will be used from now on for this specific thread. When the thread closes the database connection, it will still be kept open so that the next time when a connection is requested by the same thread, this already opened connection can be used. The connection will be closed automatically when the thread dies.
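
As a minimal sketch of this behavior (pgdb and the database name mydb are assumptions), every call to persist.connection() within the same thread returns the same underlying connection:

import threading

import pgdb  # assumed DB-API 2 module
from dbutils.persistent_db import PersistentDB

persist = PersistentDB(pgdb, database='mydb')

def worker():
    db = persist.connection()  # opened once per thread, then reused
    cur = db.cursor()
    cur.execute('select 1')
    cur.close()
    db.close()  # silently ignored; the connection stays open for this thread

threads = [threading.Thread(target=worker) for _ in range(4)]
for t in threads:
    t.start()
for t in threads:
    t.join()  # each connection is closed when its thread dies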

In short: persistent_db tries to recycle database connections to increase the overall database access performance of your threaded application, but it makes sure that connections are never shared between threads.

Therefore, persistent_db will work perfectly even if the underlying DB-API module is not thread-safe at the connection level, and it will avoid problems when other threads change the database session or perform transactions spreading over more than one SQL command.

PooledDB (pooled_db)

The class PooledDB in the module dbutils.pooled_db implements a pool of steady, thread-safe cached connections to a database which are transparently reused, using any DB-API 2 database module.

The following diagram shows the connection layers involved when you are using pooled_db connections:

pooled.png

As the diagram indicates, pooled_db can share opened database connections between different threads. This will happen by default if you set up the connection pool with a positive value of maxshared and the underlying DB-API 2 is thread-safe at the connection level, but you can also request dedicated database connections that will not be shared between threads. Besides the pool of shared connections, you can also set up a pool of at least mincached and at the most maxcached idle connections that will be used whenever a thread is requesting a dedicated database connection or the pool of shared connections is not yet full. When a thread closes a connection that is not shared anymore, it is returned back to the pool of idle connections so that it can be recycled again.
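
For illustration, a pool combining an idle pool and a shared pool might be set up like this (all sizes are hypothetical; pgdb and mydb are assumptions):

import pgdb  # assumed DB-API 2 module
from dbutils.pooled_db import PooledDB

pool = PooledDB(
    pgdb,
    mincached=2,        # open 2 idle connections at startup
    maxcached=5,        # keep at most 5 idle connections in the pool
    maxshared=3,        # share at most 3 connections between threads
    maxconnections=10,  # never open more than 10 connections in total
    blocking=True,      # wait instead of reporting an error at the limit
    database='mydb')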

If the underlying DB-API module is not thread-safe, thread locks will be used to ensure that the pooled_db connections are thread-safe. So you don't need to worry about that, but you should be careful to use dedicated connections whenever you change the database session or perform transactions spreading over more than one SQL command.
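
For example, changing a session parameter is only safe on a dedicated connection, which can be requested as shown further below:

db = pool.connection(shareable=False)  # dedicated, never shared
cur = db.cursor()
cur.execute('set datestyle to german')  # the session change stays private
cur.close()
db.close()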

Which one to use?

Both persistent_db and pooled_db serve the same purpose: to improve database access performance by recycling database connections, while preserving stability even if the database connection is disrupted.

So which of these two modules should you use? From the above explanations it is clear that persistent_db will make more sense if your application keeps a constant number of threads which frequently use the database. In this case, you will always have the same amount of open database connections. However, if your application frequently starts and ends threads, then it will be better to use pooled_db. The latter will also allow more fine-tuning, particularly if you are using a thread-safe DB-API 2 module.

Since the interface of both modules is similar, you can easily switch from one to the other and check which one will suit better.
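
Since the factories are created and used in the same way, switching between them is mostly a one-line change, as this sketch shows (pgdb and mydb are assumptions):

import pgdb  # assumed DB-API 2 module
from dbutils.persistent_db import PersistentDB
# from dbutils.pooled_db import PooledDB

factory = PersistentDB(pgdb, database='mydb')  # per-thread connections
# factory = PooledDB(pgdb, 5, database='mydb')  # pooled connections instead

db = factory.connection()  # the call is identical for both variants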

Usage

The usage of all the modules is similar, but there are also some differences in the initialization between the "Pooled" and "Persistent" variants and also between the universal DB-API 2 and the classic PyGreSQL variants.

We will cover here only the persistent_db module and the more complex pooled_db module. For the details of the other modules, have a look at their module docstrings. Using the Python interpreter console, you can display the documentation of the pooled_db module as follows (this works analogously for the other modules):

help(pooled_db)

PersistentDB (persistent_db)

In order to make use of the persistent_db module, you first need to set up a generator for your kind of database connections by creating an instance of persistent_db, passing the following parameters:

  • creator: either an arbitrary function returning new DB-API 2 connection objects or a DB-API 2 compliant database module

  • maxusage: the maximum number of reuses of a single connection (the default of 0 or None means unlimited reuse)

    Whenever the limit is reached, the connection will be reset.

  • setsession: an optional list of SQL commands that may serve to prepare the session, e.g. ["set datestyle to german", ...]

  • failures: an optional exception class or a tuple of exception classes for which the connection failover mechanism shall be applied, if the default (OperationalError, InterfaceError, InternalError) is not adequate for the used database module

  • ping: an optional flag controlling when connections are checked with the ping() method if such a method is available (0 = None = never, 1 = default = whenever it is requested, 2 = when a cursor is created, 4 = when a query is executed, 7 = always, and all other bit combinations of these values)

  • closeable: if this is set to true, then closing connections will be allowed, but by default this will be silently ignored

  • threadlocal: an optional class for representing thread-local data that will be used instead of our Python implementation (threading.local is faster, but cannot be used in all cases)

  • The creator function or the connect function of the DB-API 2 compliant database module specified as the creator will receive any additional parameters such as the host, database, user, password etc. You may choose some or all of these parameters in your own creator function, allowing for sophisticated failover and load-balancing mechanisms.

For instance, if you are using pgdb as your DB-API 2 database module and want every connection to your local database mydb to be reused 1000 times:

import pgdb  # import used DB-API 2 module
from dbutils.persistent_db import PersistentDB
persist = PersistentDB(pgdb, 1000, database='mydb')

Once you have set up the generator with these parameters, you can request database connections of that kind:

db = persist.connection()

You can use these connections just as if they were ordinary DB-API 2 connections. Actually what you get is the hardened steady_db version of the underlying DB-API 2 connection.

Closing a persistent connection with db.close() will be silently ignored since it would be reopened at the next usage anyway and contrary to the intent of having persistent connections. Instead, the connection will be automatically closed when the thread dies. You can change this behavior by setting the closeable parameter.

Note that you need to explicitly start transactions by calling the begin() method. This ensures that the transparent reopening will be suspended until the end of the transaction, and that the connection will be rolled back before being reused by the same thread.
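
A minimal sketch of an explicit transaction on a persistent connection:

db = persist.connection()
db.begin()  # suspend transparent reopening until the transaction ends
cur = db.cursor()
cur.execute(...)  # one or more SQL commands forming the transaction
cur.close()
db.commit()  # or db.rollback(); either one ends the transaction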

By setting the threadlocal parameter to threading.local, getting connections may become a bit faster, but this may not work in all environments (for instance, mod_wsgi is known to cause problems since it clears the threading.local data between requests).

PooledDB (pooled_db)

In order to make use of the pooled_db module, you first need to set up the database connection pool by creating an instance of the PooledDB class it contains, passing the following parameters:

  • creator: either an arbitrary function returning new DB-API 2 connection objects or a DB-API 2 compliant database module

  • mincached: the initial number of idle connections in the pool (the default of 0 means no connections are made at startup)

  • maxcached: the maximum number of idle connections in the pool (the default value of 0 or None means unlimited pool size)

  • maxshared: maximum number of shared connections allowed (the default value of 0 or None means all connections are dedicated)

    When this maximum number is reached, connections are shared if they have been requested as shareable.

  • maxconnections: maximum number of connections generally allowed (the default value of 0 or None means any number of connections)

  • blocking: determines behavior when exceeding the maximum

    If this is set to true, block and wait until the number of connections decreases; by default, an error will be reported.

  • maxusage: maximum number of reuses of a single connection (the default of 0 or None means unlimited reuse)

    When this maximum usage number of the connection is reached, the connection is automatically reset (closed and reopened).

  • setsession: an optional list of SQL commands that may serve to prepare the session, e.g. ["set datestyle to german", ...]

  • reset: how connections should be reset when returned to the pool (False or None rolls back only transactions started with begin(); the default value True always issues a rollback, for safety's sake)

  • failures: an optional exception class or a tuple of exception classes for which the connection failover mechanism shall be applied, if the default (OperationalError, InterfaceError, InternalError) is not adequate for the used database module

  • ping: an optional flag controlling when connections are checked with the ping() method if such a method is available (0 = None = never, 1 = default = whenever fetched from the pool, 2 = when a cursor is created, 4 = when a query is executed, 7 = always, and all other bit combinations of these values)

  • The creator function or the connect function of the DB-API 2 compliant database module specified as the creator will receive any additional parameters such as the host, database, user, password etc. You may choose some or all of these parameters in your own creator function, allowing for sophisticated failover and load-balancing mechanisms.

For instance, if you are using pgdb as your DB-API 2 database module and want a pool of at least five connections to your local database mydb:

import pgdb  # import used DB-API 2 module
from dbutils.pooled_db import PooledDB
pool = PooledDB(pgdb, 5, database='mydb')
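
If you need more control, the parameters described above can be combined; the following sketch is illustrative only, and the numbers are not recommendations:

pool = PooledDB(
    pgdb,  # or any other DB-API 2 compliant database module
    mincached=5,  # open 5 idle connections at startup
    maxcached=10,  # allow at most 10 idle connections in the pool
    maxshared=0,  # all connections are dedicated
    maxconnections=20,  # never open more than 20 connections
    blocking=True,  # block instead of reporting an error when exhausted
    maxusage=1000,  # recycle each connection after 1000 uses
    database='mydb')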

Once you have set up the connection pool, you can request database connections from that pool:

db = pool.connection()

You can use these connections just as if they were ordinary DB-API 2 connections. Actually what you get is the hardened steady_db version of the underlying DB-API 2 connection.

Please note that the connection may be shared with other threads by default if you set a non-zero maxshared parameter and the DB-API 2 module allows this. If you want to have a dedicated connection, use:

db = pool.connection(shareable=False)

Instead of this, you can also get a dedicated connection as follows:

db = pool.dedicated_connection()

If you don't need it anymore, you should immediately return it to the pool with db.close(). You can get another connection in the same way.
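
To guarantee that the connection is returned to the pool even if an error occurs, you can wrap its use in try/finally (or use the context managers shown further below):

db = pool.dedicated_connection()
try:
    cur = db.cursor()
    cur.execute(...)
    cur.close()
finally:
    db.close()  # always give the connection back to the pool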

Warning: In a threaded environment, never do the following:

pool.connection().cursor().execute(...)

This would release the connection too early for reuse, which may be fatal if the connections are not thread-safe. Make sure that the connection object stays alive as long as you are using it, like this:

db = pool.connection()
cur = db.cursor()
cur.execute(...)
res = cur.fetchone()
cur.close()  # or del cur
db.close()  # or del db

You can also use context managers for simpler code:

with pool.connection() as db:
    with db.cursor() as cur:
        cur.execute(...)
        res = cur.fetchone()

Note that you need to explicitly start transactions by calling the begin() method. This ensures that the connection will not be shared with other threads, that the transparent reopening will be suspended until the end of the transaction, and that the connection will be rolled back before being given back to the connection pool.
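
A sketch of an explicit transaction on a pooled connection; the statements are placeholders only:

db = pool.connection()
db.begin()  # the connection is now dedicated and will not be replaced
try:
    cur = db.cursor()
    cur.execute(...)  # your transactional statements
    cur.close()
    db.commit()
except Exception:
    db.rollback()
    raise
finally:
    db.close()  # return the connection to the pool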

Advanced Usage

Sometimes you may want to prepare connections before they are used by DBUtils, in ways that are not possible by just using the right parameters. For instance, pyodbc may require you to configure connections by calling the setencoding() method of the connection. You can do this by passing a modified connect() function to PersistentDB or PooledDB as creator (the first argument), like this:

import pyodbc
from dbutils.pooled_db import PooledDB

def creator():
    con = pyodbc.connect(...)
    con.setdecoding(...)
    return con

creator.dbapi = pyodbc  # lets DBUtils find the module's exception classes

db_pool = PooledDB(creator, mincached=5)
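
A pool built this way is then used exactly like any other PooledDB instance, for example:

with db_pool.connection() as db:
    with db.cursor() as cur:
        cur.execute(...)  # the connection was prepared by our creator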

Notes

If you are using one of the popular object-relational mappers SQLObject or SQLAlchemy, you won't need DBUtils, since they come with their own connection pools. SQLObject 2 (SQL-API) is actually borrowing some code from DBUtils to split the pooling out into a separate layer.

Also note that when you are using a solution like the Apache webserver with mod_python or mod_wsgi, then your Python code will usually be run in the context of the webserver's child processes. So if you are using the pooled_db module, and several of these child processes are running, you will have as many database connection pools as processes. If these processes are running many threads, this may still be a reasonable approach, but if these processes don't spawn more than one worker thread, as in the case of Apache's "prefork" multi-processing module, this approach does not make sense. If you're running such a configuration, you should resort to a middleware for connection pooling that supports multi-processing, such as pgpool or pgbouncer for the PostgreSQL database.

Future

Some ideas for future improvements:

  • As an alternative to the maximum number of uses of a connection, implement a maximum time to live for connections.

  • Create modules monitor_db and monitor_pg that will run in a separate thread, monitoring the pool of idle connections and maybe also the shared or thread-affine connections, respectively. If a disrupted connection is detected, it will be reestablished automatically by the monitoring thread. This will be useful in a scenario where a database powering a website is restarted during the night. Without the monitoring thread, the users would experience a slight delay the next morning, because only then would the disrupted database connections be detected and the pool be rebuilt. With the monitoring thread, this will already happen during the night, shortly after the disruption. The monitoring thread could also be configured to recreate the connection pool every day shortly before the users arrive.

  • Optionally log usage, bad connections and exceeding of limits.

Bug reports and feedback

You can transmit bug reports, patches and feedback by creating issues or pull requests on the GitHub project page for DBUtils.

Credits

Author:

Christoph Zwerschke

Contributions:

DBUtils uses code, input and suggestions made by Ian Bicking, Chuck Esterbrook (Webware for Python), Dan Green (DBTools), Jay Love, Michael Palmer, Tom Schwaller, Geoffrey Talvola, Warren Smith (DbConnectionPool), Ezio Vernacotola, Jehiah Czebotar, Matthew Harriger, Gregory Piñero and Josef van Eenbergen.

WebwareForPython-DBUtils-ed2a1f2/docs/main.rst000066400000000000000000000547421457556542700214170ustar00rootroot00000000000000DBUtils User's Guide ++++++++++++++++++++ :Version: 3.1.0 :Translations: English | German_ .. _German: main.de.html .. contents:: Contents Synopsis ======== DBUtils_ is a suite of Python modules allowing to connect in a safe and efficient way between a threaded Python_ application and a database. DBUtils has been originally written particularly for `Webware for Python`_ as the application and PyGreSQL_ as the adapter to a PostgreSQL_ database, but it can meanwhile be used for any other Python application and `DB-API 2`_ conformant database adapter. Modules ======= The DBUtils suite is realized as a Python package containing two subsets of modules, one for use with arbitrary DB-API 2 modules, the other one for use with the classic PyGreSQL module. +------------------+------------------------------------------+ | Universal DB-API 2 variant | +==================+==========================================+ | steady_db | Hardened DB-API 2 connections | +------------------+------------------------------------------+ | pooled_db | Pooling for DB-API 2 connections | +------------------+------------------------------------------+ | persistent_db | Persistent DB-API 2 connections | +------------------+------------------------------------------+ | simple_pooled_db | Simple pooling for DB-API 2 | +------------------+------------------------------------------+ +------------------+------------------------------------------+ | Classic PyGreSQL variant | +==================+==========================================+ | steady_pg | Hardened classic PyGreSQL connections | +------------------+------------------------------------------+ | pooled_pg | Pooling for classic PyGreSQL connections | +------------------+------------------------------------------+ | persistent_pg | Persistent classic PyGreSQL connections | +------------------+------------------------------------------+ | simple_pooled_pg | Simple pooling for classic PyGreSQL | +------------------+------------------------------------------+ The dependencies of the modules in the universal DB-API 2 variant are as indicated in the following diagram: .. image:: dependencies_db.png The dependencies of the modules in the classic PyGreSQL variant are similar: .. image:: dependencies_pg.png Download ======== You can download the actual version of DBUtils from the Python Package Index at:: https://pypi.python.org/pypi/DBUtils The source code repository can be found here on GitHub:: https://github.com/WebwareForPython/DBUtils Installation ============ Installation ------------ The package can be installed in the usual way:: python setup.py install It is even easier to download and install the package in one go using `pip`_:: pip install DBUtils .. _pip: https://pip.pypa.io/ Requirements ============ DBUtils supports Python_ versions 3.7 to 3.12. The modules in the classic PyGreSQL variant need PyGreSQL_ version 4.0 or above, while the modules in the universal DB-API 2 variant run with any Python `DB-API 2`_ compliant database interface module. Functionality ============= This section will refer to the names in the DB-API 2 variant only, but the same applies to the classic PyGreSQL variant. DBUtils installs itself as a package ``dbutils`` containing all the modules that are described in this guide. Each of these modules contains essentially one class with an analogous name that provides the corresponding functionality. 
For instance, the module ``dbutils.pooled_db`` contains the class ``PooledDB``. SimplePooledDB (simple_pooled_db) --------------------------------- The class ``SimplePooledDB`` in ``dbutils.simple_pooled_db`` is a very basic reference implementation of a pooled database connection. It is much less sophisticated than the regular ``pooled_db`` module and is particularly lacking the failover functionality. ``dbutils.simple_pooled_db`` is essentially the same as the ``MiscUtils.DBPool`` module that is part of Webware for Python. You should consider it a demonstration of concept rather than something that should go into production. SteadyDBConnection (steady_db) ------------------------------ The class ``SteadyDBConnection`` in the module ``dbutils.steady_db`` implements "hardened" connections to a database, based on ordinary connections made by any DB-API 2 database module. A "hardened" connection will transparently reopen upon access when it has been closed or the database connection has been lost or when it is used more often than an optional usage limit. A typical example where this is needed is when the database has been restarted while your application is still running and has open connections to the database, or when your application accesses a remote database in a network that is separated by a firewall and the firewall has been restarted and lost its state. Usually, you will not use the ``steady_db`` module directly; it merely serves as a basis for the next two modules, ``persistent_db`` and ``Pooled_db``. PersistentDB (persistent_db) ---------------------------- The class ``PersistentDB`` in the module ``dbutils.persistent_db`` implements steady, thread-affine, persistent connections to a database, using any DB-API 2 database module. "Thread-affine" and "persistent" means that the individual database connections stay assigned to the respective threads and will not be closed during the lifetime of the threads. The following diagram shows the connection layers involved when you are using ``persistent_db`` connections: .. image:: persistent.png Whenever a thread opens a database connection for the first time, a new connection to the database will be opened that will be used from now on for this specific thread. When the thread closes the database connection, it will still be kept open so that the next time when a connection is requested by the same thread, this already opened connection can be used. The connection will be closed automatically when the thread dies. In short: ``persistent_db`` tries to recycle database connections to increase the overall database access performance of your threaded application, but it makes sure that connections are never shared between threads. Therefore, ``persistent_db`` will work perfectly even if the underlying DB-API module is not thread-safe at the connection level, and it will avoid problems when other threads change the database session or perform transactions spreading over more than one SQL command. PooledDB (pooled_db) -------------------- The class ``PooledDB`` in the module ``dbutils.pooled_db`` implements a pool of steady, thread-safe cached connections to a database which are transparently reused, using any DB-API 2 database module. The following diagram shows the connection layers involved when you are using ``pooled_db`` connections: .. image:: pooled.png As the diagram indicates, ``pooled_db`` can share opened database connections between different threads. 
This will happen by default if you set up the connection pool with a positive value of ``maxshared`` and the underlying DB-API 2 is thread-safe at the connection level, but you can also request dedicated database connections that will not be shared between threads. Besides the pool of shared connections, you can also set up a pool of at least ``mincached`` and at the most ``maxcached`` idle connections that will be used whenever a thread is requesting a dedicated database connection or the pool of shared connections is not yet full. When a thread closes a connection that is not shared anymore, it is returned back to the pool of idle connections so that it can be recycled again. If the underlying DB-API module is not thread-safe, thread locks will be used to ensure that the ``pooled_db`` connections are thread-safe. So you don't need to worry about that, but you should be careful to use dedicated connections whenever you change the database session or perform transactions spreading over more than one SQL command. Which one to use? ----------------- Both ``persistent_db`` and ``pooled_db`` serve the same purpose to improve the database access performance by recycling database connections, while preserving stability even if database connection will be disrupted. So which of these two modules should you use? From the above explanations it is clear that ``persistent_db`` will make more sense if your application keeps a constant number of threads which frequently use the database. In this case, you will always have the same amount of open database connections. However, if your application frequently starts and ends threads, then it will be better to use ``pooled_db``. The latter will also allow more fine-tuning, particularly if you are using a thread-safe DB-API 2 module. Since the interface of both modules is similar, you can easily switch from one to the other and check which one will suit better. Usage ===== The usage of all the modules is similar, but there are also some differences in the initialization between the "Pooled" and "Persistent" variants and also between the universal DB-API 2 and the classic PyGreSQL variants. We will cover here only the ``persistent_db`` module and the more complex ``pooled_db`` module. For the details of the other modules, have a look at their module docstrings. Using the Python interpreter console, you can display the documentation of the ``pooled_db`` module as follows (this works analogously for the other modules):: help(pooled_db) PersistentDB (persistent_db) ---------------------------- In order to make use of the ``persistent_db`` module, you first need to set up a generator for your kind of database connections by creating an instance of ``persistent_db``, passing the following parameters: * ``creator``: either an arbitrary function returning new DB-API 2 connection objects or a DB-API 2 compliant database module * ``maxusage``: the maximum number of reuses of a single connection (the default of ``0`` or ``None`` means unlimited reuse) Whenever the limit is reached, the connection will be reset. * ``setsession``: an optional list of SQL commands that may serve to prepare the session, e.g. 
``["set datestyle to german", ...]`` * ``failures``: an optional exception class or a tuple of exception classes for which the connection failover mechanism shall be applied, if the default (OperationalError, InterfaceError, InternalError) is not adequate for the used database module * ``ping``: an optional flag controlling when connections are checked with the ``ping()`` method if such a method is available (``0`` = ``None`` = never, ``1`` = default = whenever it is requested, ``2`` = when a cursor is created, ``4`` = when a query is executed, ``7`` = always, and all other bit combinations of these values) * ``closeable``: if this is set to true, then closing connections will be allowed, but by default this will be silently ignored * ``threadlocal``: an optional class for representing thread-local data that will be used instead of our Python implementation (threading.local is faster, but cannot be used in all cases) * The creator function or the connect function of the DB-API 2 compliant database module specified as the creator will receive any additional parameters such as the host, database, user, password etc. You may choose some or all of these parameters in your own creator function, allowing for sophisticated failover and load-balancing mechanisms. For instance, if you are using ``pgdb`` as your DB-API 2 database module and want every connection to your local database ``mydb`` to be reused 1000 times:: import pgdb # import used DB-API 2 module from dbutils.persistent_db import PersistentDB persist = PersistentDB(pgdb, 1000, database='mydb') Once you have set up the generator with these parameters, you can request database connections of that kind:: db = persist.connection() You can use these connections just as if they were ordinary DB-API 2 connections. Actually what you get is the hardened ``steady_db`` version of the underlying DB-API 2 connection. Closing a persistent connection with ``db.close()`` will be silently ignored since it would be reopened at the next usage anyway and contrary to the intent of having persistent connections. Instead, the connection will be automatically closed when the thread dies. You can change this behavior by setting the ``closeable`` parameter. Note that you need to explicitly start transactions by calling the ``begin()`` method. This ensures that the transparent reopening will be suspended until the end of the transaction, and that the connection will be rolled back before being reused by the same thread. By setting the ``threadlocal`` parameter to ``threading.local``, getting connections may become a bit faster, but this may not work in all environments (for instance, ``mod_wsgi`` is known to cause problems since it clears the ``threading.local`` data between requests). 
PooledDB (pooled_db) -------------------- In order to make use of the ``pooled_db`` module, you first need to set up the database connection pool by creating an instance of ``pooled_db``, passing the following parameters: * ``creator``: either an arbitrary function returning new DB-API 2 connection objects or a DB-API 2 compliant database module * ``mincached`` : the initial number of idle connections in the pool (the default of ``0`` means no connections are made at startup) * ``maxcached``: the maximum number of idle connections in the pool (the default value of ``0`` or ``None`` means unlimited pool size) * ``maxshared``: maximum number of shared connections allowed (the default value of ``0`` or ``None`` means all connections are dedicated) When this maximum number is reached, connections are shared if they have been requested as shareable. * ``maxconnections``: maximum number of connections generally allowed (the default value of ``0`` or ``None`` means any number of connections) * ``blocking``: determines behavior when exceeding the maximum If this is set to true, block and wait until the number of connections decreases, but by default an error will be reported. * ``maxusage``: maximum number of reuses of a single connection (the default of ``0`` or ``None`` means unlimited reuse) When this maximum usage number of the connection is reached, the connection is automatically reset (closed and reopened). * ``setsession``: an optional list of SQL commands that may serve to prepare the session, e.g. ``["set datestyle to german", ...]`` * ``reset``: how connections should be reset when returned to the pool (``False`` or ``None`` to rollback transactions started with ``begin()``, the default value ``True`` always issues a rollback for safety's sake) * ``failures``: an optional exception class or a tuple of exception classes for which the connection failover mechanism shall be applied, if the default (OperationalError, InterfaceError, InternalError) is not adequate for the used database module * ``ping``: an optional flag controlling when connections are checked with the ``ping()`` method if such a method is available (``0`` = ``None`` = never, ``1`` = default = whenever fetched from the pool, ``2`` = when a cursor is created, ``4`` = when a query is executed, ``7`` = always, and all other bit combinations of these values) * The creator function or the connect function of the DB-API 2 compliant database module specified as the creator will receive any additional parameters such as the host, database, user, password etc. You may choose some or all of these parameters in your own creator function, allowing for sophisticated failover and load-balancing mechanisms. For instance, if you are using ``pgdb`` as your DB-API 2 database module and want a pool of at least five connections to your local database ``mydb``:: import pgdb # import used DB-API 2 module from dbutils.pooled_db import PooledDB pool = PooledDB(pgdb, 5, database='mydb') Once you have set up the connection pool you can request database connections from that pool:: db = pool.connection() You can use these connections just as if they were ordinary DB-API 2 connections. Actually what you get is the hardened ``steady_db`` version of the underlying DB-API 2 connection. Please note that the connection may be shared with other threads by default if you set a non-zero ``maxshared`` parameter and the DB-API 2 module allows this. 
If you want to have a dedicated connection, use:: db = pool.connection(shareable=False) Instead of this, you can also get a dedicated connection as follows:: db = pool.dedicated_connection() If you don't need it anymore, you should immediately return it to the pool with ``db.close()``. You can get another connection in the same way. *Warning:* In a threaded environment, never do the following:: pool.connection().cursor().execute(...) This would release the connection too early for reuse which may be fatal if the connections are not thread-safe. Make sure that the connection object stays alive as long as you are using it, like that:: db = pool.connection() cur = db.cursor() cur.execute(...) res = cur.fetchone() cur.close() # or del cur db.close() # or del db You can also use context managers for simpler code:: with pool.connection() as db: with db.cursor() as cur: cur.execute(...) res = cur.fetchone() Note that you need to explicitly start transactions by calling the ``begin()`` method. This ensures that the connection will not be shared with other threads, that the transparent reopening will be suspended until the end of the transaction, and that the connection will be rolled back before being given back to the connection pool. Advanced Usage ============== Sometimes you may want to prepare connections before they are used by DBUtils, in ways that are not possible by just using the right parameters. For instance, ``pyodbc`` may require to configure connections by calling the ``setencoding()`` method of the connection. You can do this by passing a modified ``connect()`` function to ``PersistentDB`` or ``PooledDB`` as ``creator`` (the first argument), like this:: from pyodbc import connect from dbutils.pooled_db import PooledDB def creator(): con = connect(...) con.setdecoding(...) return con creator.dbapi = pyodbc db_pool = PooledDB(creator, mincached=5) Notes ===== If you are using one of the popular object-relational mappers SQLObject_ or SQLAlchemy_, you won't need DBUtils, since they come with their own connection pools. SQLObject 2 (SQL-API) is actually borrowing some code from DBUtils to split the pooling out into a separate layer. Also note that when you are using a solution like the Apache webserver with mod_python_ or mod_wsgi_, then your Python code will be usually run in the context of the webserver's child processes. So if you are using the ``pooled_db`` module, and several of these child processes are running, you will have as much database connection pools. If these processes are running many threads, this may still be a reasonable approach, but if these processes don't spawn more than one worker thread, as in the case of Apache's "prefork" multi-processing module, this approach does not make sense. If you're running such a configuration, you should resort to a middleware for connection pooling that supports multi-processing, such as pgpool_ or pgbouncer_ for the PostgreSQL database. Future ====== Some ideas for future improvements: * Alternatively to the maximum number of uses of a connection, implement a maximum time to live for connections. * Create modules ``monitor_db`` and ``monitor_pg`` that will run in a separate thread, monitoring the pool of the idle connections and maybe also the shared connections respectively the thread-affine connections. If a disrupted connection is detected, then it will be reestablished automatically by the monitoring thread. This will be useful in a scenario where a database powering a website is restarted during the night. 
Without the monitoring thread, the users would experience a slight delay in the next morning, because only then, the disrupted database connections will be detected and the pool will be rebuilt. With the monitoring thread, this will already happen during the night, shortly after the disruption. The monitoring thread could also be configured to generally recreate the connection pool every day shortly before the users arrive. * Optionally log usage, bad connections and exceeding of limits. Bug reports and feedback ======================== You can transmit bug reports, patches and feedback by creating issues_ or `pull requests`_ on the GitHub project page for DBUtils. .. _GitHub-Projektseite: https://github.com/WebwareForPython/DBUtils .. _Issues: https://github.com/WebwareForPython/DBUtils/issues .. _Pull Requests: https://github.com/WebwareForPython/DBUtils/pulls Links ===== Some links to related and alternative software: * DBUtils_ * Python_ * `Webware for Python`_ framework * Python `DB-API 2`_ * PostgreSQL_ database * PyGreSQL_ Python adapter for PostgreSQL * pgpool_ middleware for PostgreSQL connection pooling * pgbouncer_ lightweight PostgreSQL connection pooling * SQLObject_ object-relational mapper * SQLAlchemy_ object-relational mapper .. _DBUtils: https://github.com/WebwareForPython/DBUtils .. _Python: https://www.python.org .. _Webware for Python: https://webwareforpython.github.io/w4py/ .. _Webware for Python mailing list: https://lists.sourceforge.net/lists/listinfo/webware-discuss .. _DB-API 2: https://www.python.org/dev/peps/pep-0249/ .. _The Python DB-API: http://www.linuxjournal.com/article/2605 .. _PostgresQL: https://www.postgresql.org/ .. _PyGreSQL: https://www.pygresql.org/ .. _SQLObject: http://www.sqlobject.org/ .. _SQLAlchemy: https://www.sqlalchemy.org .. _Apache: https://httpd.apache.org/ .. _mod_python: http://modpython.org/ .. _mod_wsgi: https://github.com/GrahamDumpleton/mod_wsgi .. _pgpool: https://www.pgpool.net/ .. _pgbouncer: https://pgbouncer.github.io/ Credits ======= :Author: `Christoph Zwerschke`_ :Contributions: DBUtils uses code, input and suggestions made by Ian Bicking, Chuck Esterbrook (Webware for Python), Dan Green (DBTools), Jay Love, Michael Palmer, Tom Schwaller, Geoffrey Talvola, Warren Smith (DbConnectionPool), Ezio Vernacotola, Jehiah Czebotar, Matthew Harriger, Gregory Piñero and Josef van Eenbergen. .. _Christoph Zwerschke: https://github.com/Cito Copyright and License ===================== Copyright © 2005-2024 by Christoph Zwerschke. All Rights Reserved. DBUtils is free and open source software, licensed under the `MIT license`__. 
__ https://opensource.org/licenses/MIT WebwareForPython-DBUtils-ed2a1f2/docs/make.py000077500000000000000000000017621457556542700212230ustar00rootroot00000000000000#!/usr/bin/python3.11 """Build HTML from reST files.""" from pathlib import Path from docutils.core import publish_file print("Creating the documentation...") for rst_file in Path().glob('*.rst'): rst_path = Path(rst_file) name = Path(rst_file).stem lang = Path(name).suffix if lang.startswith('.'): lang = lang[1:] if lang == 'zh': lang = 'zh_cn' else: lang = 'en' html_path = Path(name + '.html') print(name, lang) with rst_path.open(encoding='utf-8-sig') as source, \ html_path.open('w', encoding='utf-8') as destination: output = publish_file( writer_name='html5', source=source, destination=destination, enable_exit_status=True, settings_overrides={ "stylesheet_path": 'doc.css', "embed_stylesheet": False, "toc_backlinks": False, "language_code": lang, "exit_status_level": 2}) print("Done.") WebwareForPython-DBUtils-ed2a1f2/docs/persistent.png000066400000000000000000000155761457556542700226450ustar00rootroot00000000000000[binary PNG image data omitted]
WebwareForPython-DBUtils-ed2a1f2/pyproject.toml000066400000000000000000000073601457556542700217130ustar00rootroot00000000000000[build-system] build-backend = "setuptools.build_meta" requires = [ "setuptools>=68", ] [project] name = "DBUtils" version = "3.1.0" description = "Database connections for multi-threaded environments." license = {text = "MIT License"} authors = [{name = "Christoph Zwerschke", email = "cito@online.de"}] requires-python = ">3.7" classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: Database", "Topic :: Internet :: WWW/HTTP :: Dynamic Content", "Topic :: Software Development :: Libraries :: Python Modules", ] [project.optional-dependencies] pg = [ "PyGreSQL>=5", ] docs = [ "docutils", ] tests = [ "pytest>=7", "ruff", ] [project.readme] file = "README.md" content-type = "text/markdown" [project.urls] Homepage = "https://webwareforpython.github.io/DBUtils/" Download = "https://pypi.org/project/DBUtils/" Documentation = "https://webwareforpython.github.io/DBUtils/main.html" Changelog = "https://webwareforpython.github.io/DBUtils/changelog.html" "Issue Tracker" = "https://github.com/WebwareForPython/DBUtils/issues" "Source Code" = "https://github.com/WebwareForPython/DBUtils" [tool.setuptools] packages = ["dbutils"] platforms = ["any"] include-package-data = false [tool.ruff] line-length = 79 target-version = "py37" [tool.ruff.lint] select = [ "A", # flake8-builtins # "ANN", # flake8-annotations "ARG", # flake8-unused-arguments "B", # flake8-bugbear # "BLE", # flake8-blind-except "C4", # flake8-comprehensions "C90", # McCabe cyclomatic complexity "COM", # flake8-commas "D", # pydocstyle "DTZ", # flake8-datetimez "E", # pycodestyle # "EM", # flake8-errmsg "ERA", # eradicate "EXE", # flake8-executable "F", # Pyflakes # "FBT", # flake8-boolean-trap "G", # flake8-logging-format "I", # isort "ICN", # flake8-import-conventions "INP", # flake8-no-pep420 "INT", # flake8-gettext "ISC", # flake8-implicit-str-concat "N", # pep8-naming "PGH", # pygrep-hooks "PIE", # flake8-pie "PL", # Pylint "PT", # flake8-pytest-style "PTH", # flake8-use-pathlib "PYI", # flake8-pyi # "Q", # flake8-quotes "RET", # flake8-return "RSE", # flake8-raise "RUF", # Ruff-specific rules "S", # flake8-bandit # "SLF", # flake8-self "SIM", # flake8-simplify "T10", # flake8-debugger "T20", # flake8-print "TCH", # flake8-type-checking "TID", # flake8-tidy-imports # "TRY", # tryceratops "UP", # pyupgrade "W", # pycodestyle "YTT", # flake8-2020 ] # Note: use `ruff rule ...` to see explanations of rules ignore = [ "D203", # no blank line before class docstring "D213", # multi-line docstrings should not start at second line ] [tool.ruff.lint.mccabe] max-complexity = 30 [tool.ruff.lint.flake8-quotes] inline-quotes = "double" [tool.ruff.lint.pylint] max-args = 12 max-branches = 35 max-statements = 95 [tool.ruff.lint.per-file-ignores] "docs/*" = [ "INP001", # allow stand-alone scripts "T201", # allow print statements ] "tests/*" = [ "D", # no docstrings necessary here "PLR2004", # allow magic values "S101", # allow assert
statements ] [tool.codespell] skip = '.git,.tox,.venv,*.de.html,*.de.rst,build,dist,local' quiet-level = 2 WebwareForPython-DBUtils-ed2a1f2/tests/000077500000000000000000000000001457556542700201375ustar00rootroot00000000000000WebwareForPython-DBUtils-ed2a1f2/tests/__init__.py000066400000000000000000000001701457556542700222460ustar00rootroot00000000000000"""The DBUtils tests package.""" # make sure the mock pg module is installed from . import mock_pg as pg # noqa: F401 WebwareForPython-DBUtils-ed2a1f2/tests/mock_db.py000066400000000000000000000070071457556542700221130ustar00rootroot00000000000000"""This module serves as a mock object for the DB-API 2 module""" import sys import pytest __all__ = ['dbapi'] threadsafety = 2 @pytest.fixture() def dbapi(): """Get mock DB API 2 module.""" mock_db = sys.modules[__name__] mock_db.threadsafety = 2 return mock_db class Error(Exception): pass class DatabaseError(Error): pass class OperationalError(DatabaseError): pass class InterfaceError(DatabaseError): pass class InternalError(DatabaseError): pass class ProgrammingError(DatabaseError): pass def connect(database=None, user=None): return Connection(database, user) class Connection: has_ping = False num_pings = 0 def __init__(self, database=None, user=None): self.database = database self.user = user self.valid = False if database == 'error': raise OperationalError self.open_cursors = 0 self.num_uses = 0 self.num_queries = 0 self.num_pings = 0 self.session = [] self.valid = True def close(self): if not self.valid: raise InternalError self.open_cursors = 0 self.num_uses = 0 self.num_queries = 0 self.session = [] self.valid = False def commit(self): if not self.valid: raise InternalError self.session.append('commit') def rollback(self): if not self.valid: raise InternalError self.session.append('rollback') def ping(self): cls = self.__class__ cls.num_pings += 1 if not cls.has_ping: raise AttributeError if not self.valid: raise OperationalError def cursor(self, name=None): if not self.valid: raise InternalError return Cursor(self, name) class Cursor: def __init__(self, con, name=None): self.con = con self.valid = False if name == 'error': raise OperationalError self.result = None self.inputsizes = [] self.outputsizes = {} con.open_cursors += 1 self.valid = True def close(self): if not self.valid: raise InternalError self.con.open_cursors -= 1 self.valid = False def execute(self, operation): if not self.valid or not self.con.valid: raise InternalError self.con.num_uses += 1 if operation.startswith('select '): self.con.num_queries += 1 self.result = operation[7:] elif operation.startswith('set '): self.con.session.append(operation[4:]) self.result = None elif operation == 'get sizes': self.result = (self.inputsizes, self.outputsizes) self.inputsizes = [] self.outputsizes = {} else: raise ProgrammingError def fetchone(self): if not self.valid: raise InternalError result = self.result self.result = None return result def callproc(self, procname): if not self.valid or not self.con.valid or not procname: raise InternalError self.con.num_uses += 1 def setinputsizes(self, sizes): if not self.valid: raise InternalError self.inputsizes = sizes def setoutputsize(self, size, column=None): if not self.valid: raise InternalError self.outputsizes[column] = size def __del__(self): if self.valid: self.close() WebwareForPython-DBUtils-ed2a1f2/tests/mock_pg.py000066400000000000000000000046031457556542700221330ustar00rootroot00000000000000"""This module serves as a mock object for the pg API module""" import sys 
sys.modules['pg'] = sys.modules[__name__] class Error(Exception): pass class DatabaseError(Error): pass class InternalError(DatabaseError): pass class ProgrammingError(DatabaseError): pass def connect(*args, **kwargs): return PgConnection(*args, **kwargs) class PgConnection: """The underlying pg API connection class.""" def __init__(self, dbname=None, user=None): self.db = dbname self.user = user self.num_queries = 0 self.session = [] if dbname == 'error': self.status = False self.valid = False raise InternalError self.status = True self.valid = True def close(self): if not self.valid: raise InternalError self.num_queries = 0 self.session = [] self.status = False self.valid = False def reset(self): self.num_queries = 0 self.session = [] self.status = True self.valid = True def query(self, qstr): if not self.valid: raise InternalError if qstr in ('begin', 'end', 'commit', 'rollback'): self.session.append(qstr) return None if qstr.startswith('select '): self.num_queries += 1 return qstr[7:] if qstr.startswith('set '): self.session.append(qstr[4:]) return None raise ProgrammingError class DB: """Wrapper class for the pg API connection class.""" def __init__(self, *args, **kw): self.db = connect(*args, **kw) self.dbname = self.db.db self.__args = args, kw def __getattr__(self, name): if not self.db: raise AttributeError return getattr(self.db, name) def close(self): if not self.db: raise InternalError self.db.close() self.db = None def reopen(self): if self.db: self.close() try: self.db = connect(*self.__args[0], **self.__args[1]) except Exception: self.db = None raise def query(self, qstr): if not self.db: raise InternalError return self.db.query(qstr) def get_tables(self): if not self.db: raise InternalError return 'test' WebwareForPython-DBUtils-ed2a1f2/tests/test_persistent_db.py000066400000000000000000000173511457556542700244240ustar00rootroot00000000000000"""Test the PersistentDB module. Note: We don't test performance here, so the test does not predicate whether PersistentDB actually will help in improving performance or not. We also assume that the underlying SteadyDB connections are tested. 
Copyright and credit info: * This test was contributed by Christoph Zwerschke """ from queue import Empty, Queue from threading import Thread import pytest from dbutils.persistent_db import NotSupportedError, PersistentDB, local from .mock_db import dbapi # noqa: F401 def test_version(): from dbutils import __version__, persistent_db assert persistent_db.__version__ == __version__ assert PersistentDB.version == __version__ @pytest.mark.parametrize("threadsafety", [None, 0]) def test_no_threadsafety(dbapi, threadsafety): # noqa: F811 dbapi.threadsafety = threadsafety with pytest.raises(NotSupportedError): PersistentDB(dbapi) @pytest.mark.parametrize("closeable", [False, True]) def test_close(dbapi, closeable): # noqa: F811 persist = PersistentDB(dbapi, closeable=closeable) db = persist.connection() assert db._con.valid is True db.close() assert closeable ^ db._con.valid db.close() assert closeable ^ db._con.valid db._close() assert db._con.valid is False db._close() assert db._con.valid is False def test_connection(dbapi): # noqa: F811 persist = PersistentDB(dbapi) db = persist.connection() db_con = db._con assert db_con.database is None assert db_con.user is None db2 = persist.connection() assert db == db2 db3 = persist.dedicated_connection() assert db == db3 db3.close() db2.close() db.close() def test_threads(dbapi): # noqa: F811 num_threads = 3 persist = PersistentDB(dbapi, closeable=True) query_queue, result_queue = [], [] for _i in range(num_threads): query_queue.append(Queue(1)) result_queue.append(Queue(1)) def run_queries(idx): this_db = persist.connection() db = None while True: try: q = query_queue[idx].get(timeout=1) except Empty: q = None if not q: break db = persist.connection() if db != this_db: res = 'error - not persistent' elif q == 'ping': res = 'ok - thread alive' elif q == 'close': db.close() res = 'ok - connection closed' else: cursor = db.cursor() cursor.execute(q) res = cursor.fetchone() cursor.close() res = f'{idx}({db._usage}): {res}' result_queue[idx].put(res, timeout=1) if db: db.close() threads = [] for i in range(num_threads): thread = Thread(target=run_queries, args=(i,)) threads.append(thread) thread.start() for i in range(num_threads): query_queue[i].put('ping', timeout=1) for i in range(num_threads): r = result_queue[i].get(timeout=1) assert r == f'{i}(0): ok - thread alive' assert threads[i].is_alive() for i in range(num_threads): for j in range(i + 1): query_queue[i].put(f'select test{j}', timeout=1) r = result_queue[i].get(timeout=1) assert r == f'{i}({j + 1}): test{j}' query_queue[1].put('select test4', timeout=1) r = result_queue[1].get(timeout=1) assert r == '1(3): test4' query_queue[1].put('close', timeout=1) r = result_queue[1].get(timeout=1) assert r == '1(3): ok - connection closed' for j in range(2): query_queue[1].put(f'select test{j}', timeout=1) r = result_queue[1].get(timeout=1) assert r == f'1({j + 1}): test{j}' for i in range(num_threads): assert threads[i].is_alive() query_queue[i].put('ping', timeout=1) for i in range(num_threads): r = result_queue[i].get(timeout=1) assert r == f'{i}({i + 1}): ok - thread alive' assert threads[i].is_alive() for i in range(num_threads): query_queue[i].put(None, timeout=1) def test_maxusage(dbapi): # noqa: F811 persist = PersistentDB(dbapi, 20) db = persist.connection() assert db._maxusage == 20 for i in range(100): cursor = db.cursor() cursor.execute(f'select test{i}') r = cursor.fetchone() cursor.close() assert r == f'test{i}' assert db._con.valid is True j = i % 20 + 1 assert db._usage == j assert 
db._con.num_uses == j assert db._con.num_queries == j def test_setsession(dbapi): # noqa: F811 persist = PersistentDB(dbapi, 3, ('set datestyle',)) db = persist.connection() assert db._maxusage == 3 assert db._setsession_sql == ('set datestyle',) assert db._con.session == ['datestyle'] cursor = db.cursor() cursor.execute('set test') cursor.fetchone() cursor.close() for _i in range(3): assert db._con.session == ['datestyle', 'test'] cursor = db.cursor() cursor.execute('select test') cursor.fetchone() cursor.close() assert db._con.session == ['datestyle'] def test_threadlocal(dbapi): # noqa: F811 persist = PersistentDB(dbapi) assert isinstance(persist.thread, local) class Threadlocal: pass persist = PersistentDB(dbapi, threadlocal=Threadlocal) assert isinstance(persist.thread, Threadlocal) def test_ping_check(dbapi): # noqa: F811 con_cls = dbapi.Connection con_cls.has_ping = True con_cls.num_pings = 0 persist = PersistentDB(dbapi, 0, None, None, 0, True) db = persist.connection() assert db._con.valid is True assert con_cls.num_pings == 0 db.close() db = persist.connection() assert db._con.valid is False assert con_cls.num_pings == 0 persist = PersistentDB(dbapi, 0, None, None, 1, True) db = persist.connection() assert db._con.valid is True assert con_cls.num_pings == 1 db.close() db = persist.connection() assert db._con.valid is True assert con_cls.num_pings == 2 persist = PersistentDB(dbapi, 0, None, None, 2, True) db = persist.connection() assert db._con.valid is True assert con_cls.num_pings == 2 db.close() db = persist.connection() assert db._con.valid is False assert con_cls.num_pings == 2 cursor = db.cursor() assert db._con.valid is True assert con_cls.num_pings == 3 cursor.execute('select test') assert db._con.valid is True assert con_cls.num_pings == 3 persist = PersistentDB(dbapi, 0, None, None, 4, True) db = persist.connection() assert db._con.valid is True assert con_cls.num_pings == 3 db.close() db = persist.connection() assert db._con.valid is False assert con_cls.num_pings == 3 cursor = db.cursor() db._con.close() assert db._con.valid is False assert con_cls.num_pings == 3 cursor.execute('select test') assert db._con.valid is True assert con_cls.num_pings == 4 con_cls.has_ping = False con_cls.num_pings = 0 def test_failed_transaction(dbapi): # noqa: F811 persist = PersistentDB(dbapi) db = persist.connection() cursor = db.cursor() db._con.close() cursor.execute('select test') db.begin() db._con.close() with pytest.raises(dbapi.InternalError): cursor.execute('select test') cursor.execute('select test') db.begin() db.cancel() db._con.close() cursor.execute('select test') def test_context_manager(dbapi): # noqa: F811 persist = PersistentDB(dbapi) with persist.connection() as db: with db.cursor() as cursor: cursor.execute('select test') r = cursor.fetchone() assert r == 'test' WebwareForPython-DBUtils-ed2a1f2/tests/test_persistent_pg.py000066400000000000000000000113171457556542700244410ustar00rootroot00000000000000"""Test the PersistentPg module. Note: We don't test performance here, so the test does not predicate whether PersistentPg actually will help in improving performance or not. We also assume that the underlying SteadyPg connections are tested. 
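For orientation, a minimal usage sketch of the class under test (the connection parameters are illustrative placeholders only):

    from dbutils.persistent_pg import PersistentPg
    persist = PersistentPg(maxusage=1000, dbname='mydb')
    db = persist.connection()  # thread-affine classic pg connection
    res = db.query('select version()')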
Copyright and credit info: * This test was contributed by Christoph Zwerschke """ from queue import Empty, Queue from threading import Thread import pg import pytest from dbutils.persistent_pg import PersistentPg def test_version(): from dbutils import __version__, persistent_pg assert persistent_pg.__version__ == __version__ assert PersistentPg.version == __version__ @pytest.mark.parametrize("closeable", [False, True]) def test_close(closeable): persist = PersistentPg(closeable=closeable) db = persist.connection() assert db._con.db assert db._con.valid is True db.close() assert closeable ^ (db._con.db is not None and db._con.valid) db.close() assert closeable ^ (db._con.db is not None and db._con.valid) db._close() assert not db._con.db db._close() assert not db._con.db def test_threads(): num_threads = 3 persist = PersistentPg() query_queue, result_queue = [], [] for _i in range(num_threads): query_queue.append(Queue(1)) result_queue.append(Queue(1)) def run_queries(idx): this_db = persist.connection().db db = None while True: try: q = query_queue[idx].get(timeout=1) except Empty: q = None if not q: break db = persist.connection() if db.db != this_db: res = 'error - not persistent' elif q == 'ping': res = 'ok - thread alive' elif q == 'close': db.db.close() res = 'ok - connection closed' else: res = db.query(q) res = f'{idx}({db._usage}): {res}' result_queue[idx].put(res, timeout=1) if db: db.close() threads = [] for i in range(num_threads): thread = Thread(target=run_queries, args=(i,)) threads.append(thread) thread.start() for i in range(num_threads): query_queue[i].put('ping', timeout=1) for i in range(num_threads): r = result_queue[i].get(timeout=1) assert r == f'{i}(0): ok - thread alive' assert threads[i].is_alive() for i in range(num_threads): for j in range(i + 1): query_queue[i].put(f'select test{j}', timeout=1) r = result_queue[i].get(timeout=1) assert r == f'{i}({j + 1}): test{j}' query_queue[1].put('select test4', timeout=1) r = result_queue[1].get(timeout=1) assert r == '1(3): test4' query_queue[1].put('close', timeout=1) r = result_queue[1].get(timeout=1) assert r == '1(3): ok - connection closed' for j in range(2): query_queue[1].put(f'select test{j}', timeout=1) r = result_queue[1].get(timeout=1) assert r == f'1({j + 1}): test{j}' for i in range(num_threads): assert threads[i].is_alive() query_queue[i].put('ping', timeout=1) for i in range(num_threads): r = result_queue[i].get(timeout=1) assert r == f'{i}({i + 1}): ok - thread alive' assert threads[i].is_alive() for i in range(num_threads): query_queue[i].put(None, timeout=1) def test_maxusage(): persist = PersistentPg(20) db = persist.connection() assert db._maxusage == 20 for i in range(100): r = db.query(f'select test{i}') assert r == f'test{i}' assert db.db.status j = i % 20 + 1 assert db._usage == j assert db.num_queries == j def test_setsession(): persist = PersistentPg(3, ('set datestyle',)) db = persist.connection() assert db._maxusage == 3 assert db._setsession_sql == ('set datestyle',) assert db.db.session == ['datestyle'] db.query('set test') for _i in range(3): assert db.db.session == ['datestyle', 'test'] db.query('select test') assert db.db.session == ['datestyle'] def test_failed_transaction(): persist = PersistentPg() db = persist.connection() db._con.close() assert db.query('select test') == 'test' db.begin() db._con.close() with pytest.raises(pg.InternalError): db.query('select test') assert db.query('select test') == 'test' db.begin() assert db.query('select test') == 'test' db.rollback() 
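    # Outside an explicit transaction, SteadyPg transparently reopens the
    # closed underlying connection, so the final query below succeeds.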
db._con.close() assert db.query('select test') == 'test' def test_context_manager(): persist = PersistentPg() with persist.connection() as db: db.query('select test') assert db.num_queries == 1 WebwareForPython-DBUtils-ed2a1f2/tests/test_pooled_db.py000066400000000000000000001200131457556542700234740ustar00rootroot00000000000000"""Test the PooledDB module. Note: We don't test performance here, so the test does not predicate whether PooledDB actually will help in improving performance or not. We also assume that the underlying SteadyDB connections are tested. Copyright and credit info: * This test was contributed by Christoph Zwerschke """ from queue import Empty, Queue from threading import Thread import pytest from dbutils.pooled_db import ( InvalidConnectionError, NotSupportedError, PooledDB, SharedDBConnection, TooManyConnectionsError, ) from dbutils.steady_db import SteadyDBConnection from .mock_db import dbapi # noqa: F401 def test_version(): from dbutils import __version__, pooled_db assert pooled_db.__version__ == __version__ assert PooledDB.version == __version__ @pytest.mark.parametrize("threadsafety", [None, 0]) def test_no_threadsafety(dbapi, threadsafety): # noqa: F811 dbapi.threadsafety = threadsafety with pytest.raises(NotSupportedError): PooledDB(dbapi) @pytest.mark.parametrize("threadsafety", [1, 2, 3]) def test_threadsafety(dbapi, threadsafety): # noqa: F811 dbapi.threadsafety = threadsafety pool = PooledDB(dbapi, 0, 0, 1) assert hasattr(pool, '_maxshared') if threadsafety > 1: assert pool._maxshared == 1 assert hasattr(pool, '_shared_cache') else: assert pool._maxshared == 0 assert not hasattr(pool, '_shared_cache') @pytest.mark.parametrize("threadsafety", [1, 2]) def test_create_connection(dbapi, threadsafety): # noqa: F811, PLR0915 dbapi.threadsafety = threadsafety shareable = threadsafety > 1 pool = PooledDB( dbapi, 1, 1, 1, 0, False, None, None, True, None, None, 'PooledDBTestDB', user='PooledDBTestUser') assert hasattr(pool, '_idle_cache') assert len(pool._idle_cache) == 1 if shareable: assert hasattr(pool, '_shared_cache') assert len(pool._shared_cache) == 0 else: assert not hasattr(pool, '_shared_cache') assert hasattr(pool, '_maxusage') assert pool._maxusage is None assert hasattr(pool, '_setsession') assert pool._setsession is None con = pool._idle_cache[0] assert isinstance(con, SteadyDBConnection) assert hasattr(con, '_maxusage') assert con._maxusage == 0 assert hasattr(con, '_setsession_sql') assert con._setsession_sql is None db = pool.connection() assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 1 assert hasattr(db, '_con') assert db._con == con assert hasattr(db, 'cursor') assert hasattr(db, '_usage') assert db._usage == 0 assert hasattr(con, '_con') db_con = con._con assert hasattr(db_con, 'database') assert db_con.database == 'PooledDBTestDB' assert hasattr(db_con, 'user') assert db_con.user == 'PooledDBTestUser' assert hasattr(db_con, 'open_cursors') assert db_con.open_cursors == 0 assert hasattr(db_con, 'num_uses') assert db_con.num_uses == 0 assert hasattr(db_con, 'num_queries') assert db_con.num_queries == 0 cursor = db.cursor() assert db_con.open_cursors == 1 cursor.execute('select test') r = cursor.fetchone() cursor.close() assert db_con.open_cursors == 0 assert r == 'test' assert db_con.num_queries == 1 assert db._usage == 1 cursor = db.cursor() assert db_con.open_cursors == 1 cursor.execute('set sessiontest') cursor2 = db.cursor() assert db_con.open_cursors == 2 cursor2.close() assert db_con.open_cursors == 1 
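    # The mock connection counts only 'select' statements in num_queries;
    # 'set' commands are recorded in the session list instead, so the query
    # counter stays at 1 below while the usage counter keeps growing.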
cursor.close() assert db_con.open_cursors == 0 assert db_con.num_queries == 1 assert db._usage == 2 assert db_con.session == ['rollback', 'sessiontest'] pool = PooledDB(dbapi, 1, 1, 1) assert len(pool._idle_cache) == 1 if shareable: assert len(pool._shared_cache) == 0 db = pool.connection() assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 1 db.close() assert len(pool._idle_cache) == 1 if shareable: assert len(pool._shared_cache) == 0 db = pool.connection(True) assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 1 db.close() assert len(pool._idle_cache) == 1 if shareable: assert len(pool._shared_cache) == 0 db = pool.connection(False) assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 0 assert db._usage == 0 db_con = db._con._con assert db_con.database is None assert db_con.user is None db.close() assert len(pool._idle_cache) == 1 if shareable: assert len(pool._shared_cache) == 0 db = pool.dedicated_connection() assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 0 assert db._usage == 0 db_con = db._con._con assert db_con.database is None assert db_con.user is None db.close() assert len(pool._idle_cache) == 1 if shareable: assert len(pool._shared_cache) == 0 pool = PooledDB(dbapi, 0, 0, 0, 0, False, 3, ('set datestyle',)) assert pool._maxusage == 3 assert pool._setsession == ('set datestyle',) con = pool.connection()._con assert con._maxusage == 3 assert con._setsession_sql == ('set datestyle',) @pytest.mark.parametrize("threadsafety", [1, 2]) def test_close_connection(dbapi, threadsafety): # noqa: F811 dbapi.threadsafety = threadsafety shareable = threadsafety > 1 pool = PooledDB( dbapi, 0, 1, 1, 0, False, None, None, True, None, None, 'PooledDBTestDB', user='PooledDBTestUser') assert hasattr(pool, '_idle_cache') assert len(pool._idle_cache) == 0 db = pool.connection() assert hasattr(db, '_con') con = db._con assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 1 assert hasattr(db, '_shared_con') shared_con = db._shared_con assert pool._shared_cache[0] == shared_con assert hasattr(shared_con, 'shared') assert shared_con.shared == 1 assert hasattr(shared_con, 'con') assert shared_con.con == con assert isinstance(con, SteadyDBConnection) assert hasattr(con, '_con') db_con = con._con assert hasattr(db_con, 'num_queries') assert db._usage == 0 assert db_con.num_queries == 0 db.cursor().execute('select test') assert db._usage == 1 assert db_con.num_queries == 1 db.close() assert db._con is None if shareable: assert db._shared_con is None assert shared_con.shared == 0 with pytest.raises(InvalidConnectionError): assert db._usage assert not hasattr(db_con, '_num_queries') assert len(pool._idle_cache) == 1 assert pool._idle_cache[0]._con == db_con if shareable: assert len(pool._shared_cache) == 0 db.close() if shareable: assert shared_con.shared == 0 db = pool.connection() assert db._con == con assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 1 shared_con = db._shared_con assert pool._shared_cache[0] == shared_con assert shared_con.con == con assert shared_con.shared == 1 assert db._usage == 1 assert db_con.num_queries == 1 assert hasattr(db_con, 'database') assert db_con.database == 'PooledDBTestDB' assert hasattr(db_con, 'user') assert db_con.user == 'PooledDBTestUser' db.cursor().execute('select test') assert db_con.num_queries == 2 db.cursor().execute('select test') assert db_con.num_queries == 3 db.close() 
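    # Closing the handle returns the underlying connection from the shared
    # cache to the idle cache, so the very same connection object can be
    # handed out again by the next pool.connection() call.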
assert len(pool._idle_cache) == 1 assert pool._idle_cache[0]._con == db_con if shareable: assert len(pool._shared_cache) == 0 db = pool.connection(False) assert db._con == con assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 0 db.close() assert len(pool._idle_cache) == 1 if shareable: assert len(pool._shared_cache) == 0 @pytest.mark.parametrize("threadsafety", [1, 2]) def test_close_all(dbapi, threadsafety): # noqa: F811 dbapi.threadsafety = threadsafety shareable = threadsafety > 1 pool = PooledDB(dbapi, 10) assert len(pool._idle_cache) == 10 pool.close() assert len(pool._idle_cache) == 0 pool = PooledDB(dbapi, 10) closed = ['no'] def close(what=closed): what[0] = 'yes' pool._idle_cache[7]._con.close = close assert closed == ['no'] del pool assert closed == ['yes'] pool = PooledDB(dbapi, 10, 10, 5) assert len(pool._idle_cache) == 10 if shareable: assert len(pool._shared_cache) == 0 cache = [] for _i in range(5): cache.append(pool.connection()) assert len(pool._idle_cache) == 5 if shareable: assert len(pool._shared_cache) == 5 else: assert len(pool._idle_cache) == 5 pool.close() assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 0 pool = PooledDB(dbapi, 10, 10, 5) closed = [] def close_idle(what=closed): what.append('idle') def close_shared(what=closed): what.append('shared') if shareable: cache = [] for _i in range(5): cache.append(pool.connection()) pool._shared_cache[3].con.close = close_shared else: pool._idle_cache[7]._con.close = close_shared pool._idle_cache[3]._con.close = close_idle assert closed == [] del pool if shareable: del cache assert closed == ['idle', 'shared'] @pytest.mark.parametrize("threadsafety", [1, 2]) def test_shareable_connection(dbapi, threadsafety): # noqa: F811 dbapi.threadsafety = threadsafety shareable = threadsafety > 1 pool = PooledDB(dbapi, 0, 1, 2) assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 0 db1 = pool.connection() assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 1 db2 = pool.connection() assert db1._con != db2._con assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 2 db3 = pool.connection() assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 2 assert db3._con == db1._con assert db1._shared_con.shared == 2 assert db2._shared_con.shared == 1 else: assert db3._con != db1._con assert db3._con != db2._con db4 = pool.connection() assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 2 assert db4._con == db2._con assert db1._shared_con.shared == 2 assert db2._shared_con.shared == 2 else: assert db4._con != db1._con assert db4._con != db2._con assert db4._con != db3._con db5 = pool.connection(False) assert db5._con != db1._con assert db5._con != db2._con assert db5._con != db3._con assert db5._con != db4._con assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 2 assert db1._shared_con.shared == 2 assert db2._shared_con.shared == 2 db5.close() assert len(pool._idle_cache) == 1 db5 = pool.connection() if shareable: assert len(pool._idle_cache) == 1 assert len(pool._shared_cache) == 2 assert db5._shared_con.shared == 3 else: assert len(pool._idle_cache) == 0 pool = PooledDB(dbapi, 0, 0, 1) assert len(pool._idle_cache) == 0 db1 = pool.connection(False) if shareable: assert len(pool._shared_cache) == 0 db2 = pool.connection() if shareable: assert len(pool._shared_cache) == 1 db3 = pool.connection() if shareable: 
assert len(pool._shared_cache) == 1 assert db2._con == db3._con else: assert db2._con != db3._con del db3 if shareable: assert len(pool._idle_cache) == 0 assert len(pool._shared_cache) == 1 else: assert len(pool._idle_cache) == 1 del db2 if shareable: assert len(pool._idle_cache) == 1 assert len(pool._shared_cache) == 0 else: assert len(pool._idle_cache) == 2 del db1 if shareable: assert len(pool._idle_cache) == 2 assert len(pool._shared_cache) == 0 else: assert len(pool._idle_cache) == 3 @pytest.mark.parametrize("threadsafety", [1, 2]) def test_min_max_cached(dbapi, threadsafety): # noqa: F811 dbapi.threadsafety = threadsafety shareable = threadsafety > 1 pool = PooledDB(dbapi, 3) assert len(pool._idle_cache) == 3 cache = [pool.connection() for _i in range(3)] assert len(pool._idle_cache) == 0 assert cache del cache assert len(pool._idle_cache) == 3 cache = [pool.connection() for _i in range(6)] assert len(pool._idle_cache) == 0 assert cache del cache assert len(pool._idle_cache) == 6 pool = PooledDB(dbapi, 0, 3) assert len(pool._idle_cache) == 0 cache = [pool.connection() for _i in range(3)] assert len(pool._idle_cache) == 0 assert cache del cache assert len(pool._idle_cache) == 3 cache = [pool.connection() for _i in range(6)] assert len(pool._idle_cache) == 0 assert cache del cache assert len(pool._idle_cache) == 3 pool = PooledDB(dbapi, 3, 3) assert len(pool._idle_cache) == 3 cache = [pool.connection() for _i in range(3)] assert len(pool._idle_cache) == 0 assert cache del cache assert len(pool._idle_cache) == 3 cache = [pool.connection() for _i in range(6)] assert len(pool._idle_cache) == 0 assert cache del cache assert len(pool._idle_cache) == 3 pool = PooledDB(dbapi, 3, 2) assert len(pool._idle_cache) == 3 cache = [pool.connection() for _i in range(4)] assert len(pool._idle_cache) == 0 assert cache del cache assert len(pool._idle_cache) == 3 pool = PooledDB(dbapi, 2, 5) assert len(pool._idle_cache) == 2 cache = [pool.connection() for _i in range(10)] assert len(pool._idle_cache) == 0 assert cache del cache assert len(pool._idle_cache) == 5 pool = PooledDB(dbapi, 1, 2, 3) assert len(pool._idle_cache) == 1 cache = [pool.connection(False) for _i in range(4)] assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 0 assert cache del cache assert len(pool._idle_cache) == 2 cache = [pool.connection() for _i in range(10)] assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 3 assert cache del cache assert len(pool._idle_cache) == 2 if shareable: assert len(pool._shared_cache) == 0 pool = PooledDB(dbapi, 1, 3, 2) assert len(pool._idle_cache) == 1 cache = [pool.connection(False) for _i in range(4)] assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 0 assert cache del cache assert len(pool._idle_cache) == 3 cache = [pool.connection() for _i in range(10)] if shareable: assert len(pool._idle_cache) == 1 assert len(pool._shared_cache) == 2 else: assert len(pool._idle_cache) == 0 assert cache del cache assert len(pool._idle_cache) == 3 if shareable: assert len(pool._shared_cache) == 0 @pytest.mark.parametrize("threadsafety", [1, 2]) def test_max_shared(dbapi, threadsafety): # noqa: F811 dbapi.threadsafety = threadsafety shareable = threadsafety > 1 pool = PooledDB(dbapi) assert len(pool._idle_cache) == 0 cache = [pool.connection() for _i in range(10)] assert len(cache) == 10 assert len(pool._idle_cache) == 0 pool = PooledDB(dbapi, 1, 1, 0) assert len(pool._idle_cache) == 1 cache = [pool.connection() for _i in 
range(10)] assert len(cache) == 10 assert len(pool._idle_cache) == 0 pool = PooledDB(dbapi, 0, 0, 1) cache = [pool.connection() for _i in range(10)] assert len(cache) == 10 assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 1 pool = PooledDB(dbapi, 1, 1, 1) assert len(pool._idle_cache) == 1 cache = [pool.connection() for _i in range(10)] assert len(cache) == 10 assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 1 pool = PooledDB(dbapi, 0, 0, 7) cache = [pool.connection(False) for _i in range(3)] assert len(cache) == 3 assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 0 cache = [pool.connection() for _i in range(10)] assert len(cache) == 10 assert len(pool._idle_cache) == 3 if shareable: assert len(pool._shared_cache) == 7 def test_sort_shared(dbapi): # noqa: F811 pool = PooledDB(dbapi, 0, 4, 4) cache = [] for _i in range(6): db = pool.connection() db.cursor().execute('select test') cache.append(db) for i, db in enumerate(cache): assert db._shared_con.shared == (1 if 2 <= i < 4 else 2) cache[2].begin() cache[3].begin() db = pool.connection() assert db._con is cache[0]._con db.close() cache[3].rollback() db = pool.connection() assert db._con is cache[3]._con @pytest.mark.parametrize("threadsafety", [1, 2]) def test_equally_shared(dbapi, threadsafety): # noqa: F811 dbapi.threadsafety = threadsafety shareable = threadsafety > 1 pool = PooledDB(dbapi, 5, 5, 5) assert len(pool._idle_cache) == 5 for _i in range(15): db = pool.connection(False) db.cursor().execute('select test') db.close() assert len(pool._idle_cache) == 5 for i in range(5): con = pool._idle_cache[i] assert con._usage == 3 assert con._con.num_queries == 3 cache = [] for _i in range(35): db = pool.connection() db.cursor().execute('select test') cache.append(db) del db assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 5 for i in range(5): con = pool._shared_cache[i] assert con.shared == 7 con = con.con assert con._usage == 10 assert con._con.num_queries == 10 del cache assert len(pool._idle_cache) == 5 if shareable: assert len(pool._shared_cache) == 0 @pytest.mark.parametrize("threadsafety", [1, 2]) def test_many_shared(dbapi, threadsafety): # noqa: F811 dbapi.threadsafety = threadsafety shareable = threadsafety > 1 pool = PooledDB(dbapi, 0, 0, 5) cache = [] for _i in range(35): db = pool.connection() db.cursor().execute('select test1') db.cursor().execute('select test2') db.cursor().callproc('test3') cache.append(db) del db assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 5 for i in range(5): con = pool._shared_cache[i] assert con.shared == 7 con = con.con assert con._usage == 21 assert con._con.num_queries == 14 cache[3] = cache[8] = cache[33] = None cache[12] = cache[17] = cache[34] = None assert len(pool._shared_cache) == 5 assert pool._shared_cache[0].shared == 7 assert pool._shared_cache[1].shared == 7 assert pool._shared_cache[2].shared == 5 assert pool._shared_cache[3].shared == 4 assert pool._shared_cache[4].shared == 6 for db in cache: if db: db.cursor().callproc('test4') for _i in range(6): db = pool.connection() db.cursor().callproc('test4') cache.append(db) del db for i in range(5): con = pool._shared_cache[i] assert con.shared == 7 con = con.con assert con._usage == 28 assert con._con.num_queries == 14 del cache if shareable: assert len(pool._idle_cache) == 5 assert len(pool._shared_cache) == 0 else: assert len(pool._idle_cache) == 35
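# A minimal sketch (not part of the test suite) of what the maxshared
# behavior exercised above means for application code: with a thread-safe
# driver (threadsafety level 2 or higher), no more than maxshared underlying
# connections are opened, however many handles are requested.
def _example_max_shared(creator):  # pragma: no cover - illustration only
    pool = PooledDB(creator, 0, 0, 5)  # mincached=0, maxcached=0, maxshared=5
    handles = [pool.connection() for _i in range(35)]
    # all 35 handles are backed by at most 5 shared steady connections
    assert len({id(handle._con) for handle in handles}) <= 5
    for handle in handles:
        handle.close()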
@pytest.mark.parametrize("threadsafety", [1, 2]) def test_rollback(dbapi, threadsafety): # noqa: F811 dbapi.threadsafety = threadsafety pool = PooledDB(dbapi, 0, 1) assert len(pool._idle_cache) == 0 db = pool.connection(False) assert len(pool._idle_cache) == 0 assert db._con._con.open_cursors == 0 cursor = db.cursor() assert db._con._con.open_cursors == 1 cursor.execute('set doit1') db.commit() cursor.execute('set dont1') cursor.close() assert db._con._con.open_cursors == 0 del db assert len(pool._idle_cache) == 1 db = pool.connection(False) assert len(pool._idle_cache) == 0 assert db._con._con.open_cursors == 0 cursor = db.cursor() assert db._con._con.open_cursors == 1 cursor.execute('set doit2') cursor.close() assert db._con._con.open_cursors == 0 db.commit() session = db._con._con.session db.close() assert session == [ 'doit1', 'commit', 'dont1', 'rollback', 'doit2', 'commit', 'rollback'] @pytest.mark.parametrize("threadsafety", [1, 2]) def test_maxconnections(dbapi, threadsafety): # noqa: F811, PLR0915 dbapi.threadsafety = threadsafety shareable = threadsafety > 1 pool = PooledDB(dbapi, 1, 2, 2, 3) assert hasattr(pool, '_maxconnections') assert pool._maxconnections == 3 assert hasattr(pool, '_connections') assert pool._connections == 0 assert len(pool._idle_cache) == 1 cache = [] for _i in range(3): cache.append(pool.connection(False)) assert pool._connections == 3 assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 0 with pytest.raises(TooManyConnectionsError): pool.connection(False) with pytest.raises(TooManyConnectionsError): pool.connection() cache = [] assert pool._connections == 0 assert len(pool._idle_cache) == 2 if shareable: assert len(pool._shared_cache) == 0 for _i in range(3): cache.append(pool.connection()) assert len(pool._idle_cache) == 0 if shareable: assert pool._connections == 2 assert len(pool._shared_cache) == 2 cache.append(pool.connection(False)) assert pool._connections == 3 assert len(pool._shared_cache) == 2 else: assert pool._connections == 3 with pytest.raises(TooManyConnectionsError): pool.connection(False) if shareable: cache.append(pool.connection(True)) assert pool._connections == 3 else: with pytest.raises(TooManyConnectionsError): pool.connection() del cache assert pool._connections == 0 assert len(pool._idle_cache) == 2 pool = PooledDB(dbapi, 0, 1, 1, 1) assert pool._maxconnections == 1 assert pool._connections == 0 assert len(pool._idle_cache) == 0 db = pool.connection(False) assert pool._connections == 1 assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 0 with pytest.raises(TooManyConnectionsError): pool.connection(False) with pytest.raises(TooManyConnectionsError): pool.connection() assert db del db assert pool._connections == 0 assert len(pool._idle_cache) == 1 cache = [pool.connection()] assert pool._connections == 1 assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 1 cache.append(pool.connection()) assert pool._connections == 1 assert len(pool._shared_cache) == 1 assert pool._shared_cache[0].shared == 2 else: with pytest.raises(TooManyConnectionsError): pool.connection() with pytest.raises(TooManyConnectionsError): pool.connection(False) if shareable: cache.append(pool.connection(True)) assert pool._connections == 1 assert len(pool._shared_cache) == 1 assert pool._shared_cache[0].shared == 3 else: with pytest.raises(TooManyConnectionsError): pool.connection(True) del cache assert pool._connections == 0 assert len(pool._idle_cache) == 1 if shareable: 
assert len(pool._shared_cache) == 0 db = pool.connection(False) assert pool._connections == 1 assert len(pool._idle_cache) == 0 assert db del db assert pool._connections == 0 assert len(pool._idle_cache) == 1 pool = PooledDB(dbapi, 1, 2, 2, 1) assert pool._maxconnections == 2 assert pool._connections == 0 assert len(pool._idle_cache) == 1 cache = [pool.connection(False)] assert pool._connections == 1 assert len(pool._idle_cache) == 0 cache.append(pool.connection(False)) assert pool._connections == 2 assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 0 with pytest.raises(TooManyConnectionsError): pool.connection(False) with pytest.raises(TooManyConnectionsError): pool.connection() pool = PooledDB(dbapi, 4, 3, 2, 1, False) assert pool._maxconnections == 4 assert pool._connections == 0 assert len(pool._idle_cache) == 4 cache = [] for _i in range(4): cache.append(pool.connection(False)) assert pool._connections == 4 assert len(pool._idle_cache) == 0 with pytest.raises(TooManyConnectionsError): pool.connection(False) with pytest.raises(TooManyConnectionsError): pool.connection() pool = PooledDB(dbapi, 1, 2, 3, 4, False) assert pool._maxconnections == 4 assert pool._connections == 0 assert len(pool._idle_cache) == 1 for _i in range(4): cache.append(pool.connection()) assert len(pool._idle_cache) == 0 if shareable: assert pool._connections == 3 assert len(pool._shared_cache) == 3 cache.append(pool.connection()) assert pool._connections == 3 cache.append(pool.connection(False)) assert pool._connections == 4 else: assert pool._connections == 4 with pytest.raises(TooManyConnectionsError): pool.connection() with pytest.raises(TooManyConnectionsError): pool.connection(False) pool = PooledDB(dbapi, 0, 0, 3, 3, False) assert pool._maxconnections == 3 assert pool._connections == 0 cache = [] for _i in range(3): cache.append(pool.connection(False)) assert pool._connections == 3 with pytest.raises(TooManyConnectionsError): pool.connection(False) with pytest.raises(TooManyConnectionsError): pool.connection(True) cache = [] assert pool._connections == 0 for _i in range(3): cache.append(pool.connection()) assert pool._connections == 3 if shareable: for _i in range(3): cache.append(pool.connection()) assert pool._connections == 3 else: with pytest.raises(TooManyConnectionsError): pool.connection() with pytest.raises(TooManyConnectionsError): pool.connection(False) pool = PooledDB(dbapi, 0, 0, 3) assert pool._maxconnections == 0 assert pool._connections == 0 cache = [] for _i in range(10): cache.append(pool.connection(False)) cache.append(pool.connection()) if shareable: assert pool._connections == 13 assert len(pool._shared_cache) == 3 else: assert pool._connections == 20 pool = PooledDB(dbapi, 1, 1, 1, 1, True) assert pool._maxconnections == 1 assert pool._connections == 0 assert len(pool._idle_cache) == 1 db = pool.connection(False) assert pool._connections == 1 assert len(pool._idle_cache) == 0 def connection(): db = pool.connection() cursor = db.cursor() cursor.execute('set thread') cursor.close() db.close() thread = Thread(target=connection) thread.start() thread.join(0.1) assert thread.is_alive() assert pool._connections == 1 assert len(pool._idle_cache) == 0 if shareable: assert len(pool._shared_cache) == 0 session = db._con._con.session assert session == ['rollback'] del db thread.join(0.1) assert not thread.is_alive() assert pool._connections == 0 assert len(pool._idle_cache) == 1 if shareable: assert len(pool._shared_cache) == 0 db = pool.connection(False) assert 
pool._connections == 1 assert len(pool._idle_cache) == 0 assert session == ['rollback', 'rollback', 'thread', 'rollback'] assert db del db @pytest.mark.parametrize("threadsafety", [1, 2]) @pytest.mark.parametrize("maxusage", [0, 3, 7]) def test_maxusage(dbapi, threadsafety, maxusage): # noqa: F811 dbapi.threadsafety = threadsafety pool = PooledDB(dbapi, 0, 0, 0, 1, False, maxusage) assert pool._maxusage == maxusage assert len(pool._idle_cache) == 0 db = pool.connection(False) assert db._con._maxusage == maxusage assert len(pool._idle_cache) == 0 assert db._con._con.open_cursors == 0 assert db._usage == 0 assert db._con._con.num_uses == 0 assert db._con._con.num_queries == 0 for i in range(20): cursor = db.cursor() assert db._con._con.open_cursors == 1 cursor.execute(f'select test{i}') r = cursor.fetchone() assert r == f'test{i}' cursor.close() assert db._con._con.open_cursors == 0 j = i % maxusage + 1 if maxusage else i + 1 assert db._usage == j assert db._con._con.num_uses == j assert db._con._con.num_queries == j db.cursor().callproc('test') assert db._con._con.open_cursors == 0 assert db._usage == j + 1 assert db._con._con.num_uses == j + 1 assert db._con._con.num_queries == j @pytest.mark.parametrize("threadsafety", [1, 2]) def test_setsession(dbapi, threadsafety): # noqa: F811 dbapi.threadsafety = threadsafety setsession = ('set time zone', 'set datestyle') pool = PooledDB(dbapi, 0, 0, 0, 1, False, None, setsession) assert pool._setsession == setsession db = pool.connection(False) assert db._setsession_sql == setsession assert db._con._con.session == ['time zone', 'datestyle'] db.cursor().execute('select test') db.cursor().execute('set test1') assert db._usage == 2 assert db._con._con.num_uses == 4 assert db._con._con.num_queries == 1 assert db._con._con.session == ['time zone', 'datestyle', 'test1'] db.close() db = pool.connection(False) assert db._setsession_sql == setsession assert db._con._con.session == \ ['time zone', 'datestyle', 'test1', 'rollback'] db._con._con.close() db.cursor().execute('select test') db.cursor().execute('set test2') assert db._con._con.session == ['time zone', 'datestyle', 'test2'] @pytest.mark.parametrize("threadsafety", [1, 2]) def test_one_thread_two_connections(dbapi, threadsafety): # noqa: F811 dbapi.threadsafety = threadsafety shareable = threadsafety > 1 pool = PooledDB(dbapi, 2) db1 = pool.connection() for _i in range(5): db1.cursor().execute('select test') db2 = pool.connection() assert db1 != db2 assert db1._con != db2._con for _i in range(7): db2.cursor().execute('select test') assert db1._con._con.num_queries == 5 assert db2._con._con.num_queries == 7 del db1 db1 = pool.connection() assert db1 != db2 assert db1._con != db2._con for _i in range(3): db1.cursor().execute('select test') assert db1._con._con.num_queries == 8 db2.cursor().execute('select test') assert db2._con._con.num_queries == 8 pool = PooledDB(dbapi, 0, 0, 2) db1 = pool.connection() for _i in range(5): db1.cursor().execute('select test') db2 = pool.connection() assert db1 != db2 assert db1._con != db2._con for _i in range(7): db2.cursor().execute('select test') assert db1._con._con.num_queries == 5 assert db2._con._con.num_queries == 7 del db1 db1 = pool.connection() assert db1 != db2 assert db1._con != db2._con for _i in range(3): db1.cursor().execute('select test') assert db1._con._con.num_queries == 8 db2.cursor().execute('select test') assert db2._con._con.num_queries == 8 pool = PooledDB(dbapi, 0, 0, 1) db1 = pool.connection() db2 = pool.connection() assert db1 != db2 if 
shareable: assert db1._con == db2._con else: assert db1._con != db2._con del db1 db1 = pool.connection(False) assert db1 != db2 assert db1._con != db2._con @pytest.mark.parametrize("threadsafety", [1, 2]) def test_three_threads_two_connections(dbapi, threadsafety): # noqa: F811 dbapi.threadsafety = threadsafety pool = PooledDB(dbapi, 2, 2, 0, 2, True) queue = Queue(3) def connection(): queue.put(pool.connection(), timeout=1) for _i in range(3): Thread(target=connection).start() db1 = queue.get(timeout=1) db2 = queue.get(timeout=1) assert db1 != db2 db1_con = db1._con db2_con = db2._con assert db1_con != db2_con with pytest.raises(Empty): queue.get(timeout=0.1) del db1 db1 = queue.get(timeout=1) assert db1 != db2 assert db1._con != db2._con assert db1._con == db1_con pool = PooledDB(dbapi, 2, 2, 1, 2, True) db1 = pool.connection(False) db2 = pool.connection(False) assert db1 != db2 db1_con = db1._con db2_con = db2._con assert db1_con != db2_con Thread(target=connection).start() with pytest.raises(Empty): queue.get(timeout=0.1) del db1 db1 = queue.get(timeout=1) assert db1 != db2 assert db1._con != db2._con assert db1._con == db1_con def test_ping_check(dbapi): # noqa: F811 con_cls = dbapi.Connection con_cls.has_ping = True con_cls.num_pings = 0 pool = PooledDB(dbapi, 1, 1, 0, 0, False, None, None, True, None, 0) db = pool.connection() assert db._con._con.valid assert con_cls.num_pings == 0 db._con.close() db.close() db = pool.connection() assert not db._con._con.valid assert con_cls.num_pings == 0 pool = PooledDB(dbapi, 1, 1, 1, 0, False, None, None, True, None, 0) db = pool.connection() assert db._con._con.valid assert con_cls.num_pings == 0 db._con.close() db = pool.connection() assert not db._con._con.valid assert con_cls.num_pings == 0 pool = PooledDB(dbapi, 1, 1, 0, 0, False, None, None, True, None, 1) db = pool.connection() assert db._con._con.valid assert con_cls.num_pings == 1 db._con.close() db.close() db = pool.connection() assert db._con._con.valid assert con_cls.num_pings == 2 pool = PooledDB(dbapi, 1, 1, 1, 0, False, None, None, True, None, 1) db = pool.connection() assert db._con._con.valid assert con_cls.num_pings == 3 db._con.close() db = pool.connection() assert db._con._con.valid assert con_cls.num_pings == 4 pool = PooledDB(dbapi, 1, 1, 1, 0, False, None, None, True, None, 2) db = pool.connection() assert db._con._con.valid assert con_cls.num_pings == 4 db._con.close() db = pool.connection() assert not db._con._con.valid assert con_cls.num_pings == 4 db.cursor() assert db._con._con.valid assert con_cls.num_pings == 5 pool = PooledDB(dbapi, 1, 1, 1, 0, False, None, None, True, None, 4) db = pool.connection() assert db._con._con.valid assert con_cls.num_pings == 5 db._con.close() db = pool.connection() assert not db._con._con.valid assert con_cls.num_pings == 5 cursor = db.cursor() db._con.close() assert not db._con._con.valid assert con_cls.num_pings == 5 cursor.execute('select test') assert db._con._con.valid assert con_cls.num_pings == 6 con_cls.has_ping = False con_cls.num_pings = 0 def test_failed_transaction(dbapi): # noqa: F811 pool = PooledDB(dbapi, 0, 1, 1) db = pool.connection() cursor = db.cursor() db._con._con.close() cursor.execute('select test') db.begin() db._con._con.close() with pytest.raises(dbapi.InternalError): cursor.execute('select test') cursor.execute('select test') db.begin() db.cancel() db._con._con.close() cursor.execute('select test') pool = PooledDB(dbapi, 1, 1, 0) db = pool.connection() cursor = db.cursor() db._con._con.close() 
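    # No transaction has been started on this fresh connection, so the
    # hardened connection is reopened transparently and the execute below
    # succeeds instead of raising.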
cursor.execute('select test') db.begin() db._con._con.close() with pytest.raises(dbapi.InternalError): cursor.execute('select test') cursor.execute('select test') db.begin() db.cancel() db._con._con.close() cursor.execute('select test') def test_shared_in_transaction(dbapi): # noqa: F811 pool = PooledDB(dbapi, 0, 1, 1) db = pool.connection() db.begin() pool.connection(False) with pytest.raises(TooManyConnectionsError): pool.connection() pool = PooledDB(dbapi, 0, 2, 2) db1 = pool.connection() db2 = pool.connection() assert db2._con is not db1._con db2.close() db2 = pool.connection() assert db2._con is not db1._con db = pool.connection() assert db._con is db1._con db.close() db1.begin() db = pool.connection() assert db._con is db2._con db.close() db2.begin() pool.connection(False) with pytest.raises(TooManyConnectionsError): pool.connection() db1.rollback() db = pool.connection() assert db._con is db1._con def test_reset_transaction(dbapi): # noqa: F811 pool = PooledDB(dbapi, 1, 1, 0) db = pool.connection() db.begin() con = db._con assert con._transaction assert con._con.session == ['rollback'] db.close() assert pool.connection()._con is con assert not con._transaction assert con._con.session == ['rollback'] * 3 pool = PooledDB(dbapi, 1, 1, 0, reset=False) db = pool.connection() db.begin() con = db._con assert con._transaction assert con._con.session == [] db.close() assert pool.connection()._con is con assert not con._transaction assert con._con.session == ['rollback'] def test_context_manager(dbapi): # noqa: F811 pool = PooledDB(dbapi, 1, 1, 1) con = pool._idle_cache[0]._con with pool.connection() as db: assert hasattr(db, '_shared_con') assert not pool._idle_cache assert con.valid with db.cursor() as cursor: assert con.open_cursors == 1 cursor.execute('select test') r = cursor.fetchone() assert con.open_cursors == 0 assert r == 'test' assert con.num_queries == 1 assert pool._idle_cache with pool.dedicated_connection() as db: assert not hasattr(db, '_shared_con') assert not pool._idle_cache with db.cursor() as cursor: assert con.open_cursors == 1 cursor.execute('select test') r = cursor.fetchone() assert con.open_cursors == 0 assert r == 'test' assert con.num_queries == 2 assert pool._idle_cache def test_shared_db_connection_create(dbapi): # noqa: F811 db_con = dbapi.connect() con = SharedDBConnection(db_con) assert con.con == db_con assert con.shared == 1 def test_shared_db_connection_share_and_unshare(dbapi): # noqa: F811 con = SharedDBConnection(dbapi.connect()) assert con.shared == 1 con.share() assert con.shared == 2 con.share() assert con.shared == 3 con.unshare() assert con.shared == 2 con.unshare() assert con.shared == 1 def test_shared_db_connection_compare(dbapi): # noqa: F811 con1 = SharedDBConnection(dbapi.connect()) con1.con._transaction = False con2 = SharedDBConnection(dbapi.connect()) con2.con._transaction = False assert con1 == con2 assert con1 <= con2 assert con1 >= con2 assert not con1 != con2 # noqa: SIM202 assert not con1 < con2 assert not con1 > con2 con2.share() assert not con1 == con2 # noqa: SIM201 assert con1 <= con2 assert not con1 >= con2 assert con1 != con2 assert con1 < con2 assert not con1 > con2 con1.con._transaction = True assert not con1 == con2 # noqa: SIM201 assert not con1 <= con2 assert con1 >= con2 assert con1 != con2 assert not con1 < con2 assert con1 > con2 WebwareForPython-DBUtils-ed2a1f2/tests/test_pooled_pg.py000066400000000000000000000222301457556542700235170ustar00rootroot00000000000000"""Test the PooledPg module. 
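For orientation, a minimal usage sketch of the class under test (the connection parameters are illustrative placeholders only):

    from dbutils.pooled_pg import PooledPg
    pool = PooledPg(mincached=2, maxcached=5, dbname='mydb')
    db = pool.connection()
    try:
        res = db.query('select version()')
    finally:
        db.close()  # returns the connection to the pool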
Note: We don't test performance here, so the test does not predicate whether PooledPg actually will help in improving performance or not. We also assume that the underlying SteadyPg connections are tested. Copyright and credit info: * This test was contributed by Christoph Zwerschke """ from queue import Empty, Queue from threading import Thread import pg # noqa: F401 import pytest from dbutils.pooled_pg import ( InvalidConnectionError, PooledPg, TooManyConnectionsError, ) from dbutils.steady_pg import SteadyPgConnection def test_version(): from dbutils import __version__, pooled_pg assert pooled_pg.__version__ == __version__ assert PooledPg.version == __version__ def test_create_connection(): pool = PooledPg( 1, 1, 0, False, None, None, False, 'PooledPgTestDB', user='PooledPgTestUser') assert hasattr(pool, '_cache') assert pool._cache.qsize() == 1 assert hasattr(pool, '_maxusage') assert pool._maxusage is None assert hasattr(pool, '_setsession') assert pool._setsession is None assert hasattr(pool, '_reset') assert not pool._reset db_con = pool._cache.get(0) pool._cache.put(db_con, 0) assert isinstance(db_con, SteadyPgConnection) db = pool.connection() assert pool._cache.qsize() == 0 assert hasattr(db, '_con') assert db._con == db_con assert hasattr(db, 'query') assert hasattr(db, 'num_queries') assert db.num_queries == 0 assert hasattr(db, '_maxusage') assert db._maxusage == 0 assert hasattr(db, '_setsession_sql') assert db._setsession_sql is None assert hasattr(db, 'dbname') assert db.dbname == 'PooledPgTestDB' assert hasattr(db, 'user') assert db.user == 'PooledPgTestUser' db.query('select test') assert db.num_queries == 1 pool = PooledPg(1) db = pool.connection() assert hasattr(db, 'dbname') assert db.dbname is None assert hasattr(db, 'user') assert db.user is None assert hasattr(db, 'num_queries') assert db.num_queries == 0 pool = PooledPg(0, 0, 0, False, 3, ('set datestyle',)) assert pool._maxusage == 3 assert pool._setsession == ('set datestyle',) db = pool.connection() assert db._maxusage == 3 assert db._setsession_sql == ('set datestyle',) def test_close_connection(): pool = PooledPg( 0, 1, 0, False, None, None, False, 'PooledPgTestDB', user='PooledPgTestUser') db = pool.connection() assert hasattr(db, '_con') db_con = db._con assert isinstance(db_con, SteadyPgConnection) assert hasattr(pool, '_cache') assert pool._cache.qsize() == 0 assert db.num_queries == 0 db.query('select test') assert db.num_queries == 1 db.close() with pytest.raises(InvalidConnectionError): assert db.num_queries db = pool.connection() assert hasattr(db, 'dbname') assert db.dbname == 'PooledPgTestDB' assert hasattr(db, 'user') assert db.user == 'PooledPgTestUser' assert db.num_queries == 1 db.query('select test') assert db.num_queries == 2 db = pool.connection() assert pool._cache.qsize() == 1 assert pool._cache.get(0) == db_con assert db del db def test_min_max_cached(): pool = PooledPg(3) assert hasattr(pool, '_cache') assert pool._cache.qsize() == 3 cache = [pool.connection() for _i in range(3)] assert pool._cache.qsize() == 0 for _i in range(3): cache.pop().close() assert pool._cache.qsize() == 3 for _i in range(6): cache.append(pool.connection()) assert pool._cache.qsize() == 0 for _i in range(6): cache.pop().close() assert pool._cache.qsize() == 6 pool = PooledPg(3, 4) assert hasattr(pool, '_cache') assert pool._cache.qsize() == 3 cache = [pool.connection() for _i in range(3)] assert pool._cache.qsize() == 0 for _i in range(3): cache.pop().close() assert pool._cache.qsize() == 3 for _i in range(6): 
cache.append(pool.connection()) assert pool._cache.qsize() == 0 for _i in range(6): cache.pop().close() assert pool._cache.qsize() == 4 pool = PooledPg(3, 2) assert hasattr(pool, '_cache') assert pool._cache.qsize() == 3 cache = [pool.connection() for _i in range(4)] assert pool._cache.qsize() == 0 for _i in range(4): cache.pop().close() assert pool._cache.qsize() == 3 pool = PooledPg(2, 5) assert hasattr(pool, '_cache') assert pool._cache.qsize() == 2 cache = [pool.connection() for _i in range(10)] assert pool._cache.qsize() == 0 for _i in range(10): cache.pop().close() assert pool._cache.qsize() == 5 def test_max_connections(): pool = PooledPg(1, 2, 3) assert pool._cache.qsize() == 1 cache = [pool.connection() for _i in range(3)] assert pool._cache.qsize() == 0 with pytest.raises(TooManyConnectionsError): pool.connection() pool = PooledPg(0, 1, 1, False) assert pool._blocking == 0 assert pool._cache.qsize() == 0 db = pool.connection() assert pool._cache.qsize() == 0 with pytest.raises(TooManyConnectionsError): pool.connection() assert db del db assert cache del cache pool = PooledPg(1, 2, 1) assert pool._cache.qsize() == 1 cache = [pool.connection()] assert pool._cache.qsize() == 0 cache.append(pool.connection()) assert pool._cache.qsize() == 0 with pytest.raises(TooManyConnectionsError): pool.connection() pool = PooledPg(3, 2, 1, False) assert pool._cache.qsize() == 3 cache = [pool.connection() for _i in range(3)] assert len(cache) == 3 assert pool._cache.qsize() == 0 with pytest.raises(TooManyConnectionsError): pool.connection() pool = PooledPg(1, 1, 1, True) assert pool._blocking == 1 assert pool._cache.qsize() == 1 db = pool.connection() assert pool._cache.qsize() == 0 def connection(): pool.connection().query('set thread') thread = Thread(target=connection) thread.start() thread.join(0.1) assert thread.is_alive() assert pool._cache.qsize() == 0 session = db._con.session assert session == [] del db thread.join(0.1) assert not thread.is_alive() assert pool._cache.qsize() == 1 db = pool.connection() assert pool._cache.qsize() == 0 assert session == ['thread'] assert db del db def test_one_thread_two_connections(): pool = PooledPg(2) db1 = pool.connection() for _i in range(5): db1.query('select test') db2 = pool.connection() assert db1 != db2 assert db1._con != db2._con for _i in range(7): db2.query('select test') assert db1.num_queries == 5 assert db2.num_queries == 7 del db1 db1 = pool.connection() assert db1 != db2 assert db1._con != db2._con assert hasattr(db1, 'query') for _i in range(3): db1.query('select test') assert db1.num_queries == 8 db2.query('select test') assert db2.num_queries == 8 def test_three_threads_two_connections(): pool = PooledPg(2, 2, 2, True) queue = Queue(3) def connection(): queue.put(pool.connection(), timeout=1) for _i in range(3): Thread(target=connection).start() db1 = queue.get(timeout=1) db2 = queue.get(timeout=1) db1_con = db1._con db2_con = db2._con assert db1 != db2 assert db1_con != db2_con with pytest.raises(Empty): queue.get(timeout=0.1) del db1 db1 = queue.get(timeout=1) assert db1 != db2 assert db1._con != db2._con assert db1._con == db1_con def test_reset_transaction(): pool = PooledPg(1) db = pool.connection() db.begin() con = db._con assert con._transaction db.query('select test') assert con.num_queries == 1 db.close() assert pool.connection()._con is con assert not con._transaction assert con.session == ['begin', 'rollback'] assert con.num_queries == 1 pool = PooledPg(1, reset=1) db = pool.connection() db.begin() con = db._con assert 
con._transaction assert con.session == ['rollback', 'begin'] db.query('select test') assert con.num_queries == 1 db.close() assert pool.connection()._con is con assert not con._transaction assert con.session == ['rollback', 'begin', 'rollback', 'rollback'] assert con.num_queries == 1 pool = PooledPg(1, reset=2) db = pool.connection() db.begin() con = db._con assert con._transaction assert con.session == ['begin'] db.query('select test') assert con.num_queries == 1 db.close() assert pool.connection()._con is con assert not con._transaction assert con.session == [] assert con.num_queries == 0 def test_context_manager(): pool = PooledPg(1, 1, 1) with pool.connection() as db: db_con = db._con._con db.query('select test') assert db_con.num_queries == 1 with pytest.raises(TooManyConnectionsError): pool.connection() with pool.connection() as db: db_con = db._con._con db.query('select test') assert db_con.num_queries == 2 with pytest.raises(TooManyConnectionsError): pool.connection() WebwareForPython-DBUtils-ed2a1f2/tests/test_simple_pooled_db.py000066400000000000000000000077561457556542700250670ustar00rootroot00000000000000"""Test the SimplePooledDB module. Note: We don't test performance here, so the test does not predicate whether SimplePooledDB actually will help in improving performance or not. We also do not test any real world DB-API 2 module, we just mock the basic connection functionality of an arbitrary module. Copyright and credit info: * This test was contributed by Christoph Zwerschke """ from queue import Empty, Queue from threading import Thread import pytest from dbutils import simple_pooled_db from . import mock_db as dbapi def my_db_pool(threadsafety, max_connections): """Get simple PooledDB connection.""" dbapi_threadsafety = dbapi.threadsafety dbapi.threadsafety = threadsafety try: return simple_pooled_db.PooledDB( dbapi, max_connections, 'SimplePooledDBTestDB', 'SimplePooledDBTestUser') finally: dbapi.threadsafety = dbapi_threadsafety def test_version(): from dbutils import __version__ assert simple_pooled_db.__version__ == __version__ assert simple_pooled_db.PooledDB.version == __version__ @pytest.mark.parametrize("threadsafety", [None, -1, 0, 4]) def test_no_threadsafety(threadsafety): with pytest.raises(simple_pooled_db.NotSupportedError): my_db_pool(threadsafety, 1) @pytest.mark.parametrize("threadsafety", [1, 2, 3]) def test_create_connection(threadsafety): dbpool = my_db_pool(threadsafety, 1) db = dbpool.connection() assert hasattr(db, 'cursor') assert hasattr(db, 'open_cursors') assert db.open_cursors == 0 assert hasattr(db, 'database') assert db.database == 'SimplePooledDBTestDB' assert hasattr(db, 'user') assert db.user == 'SimplePooledDBTestUser' cursor = db.cursor() assert cursor is not None assert db.open_cursors == 1 del cursor @pytest.mark.parametrize("threadsafety", [1, 2, 3]) def test_close_connection(threadsafety): db_pool = my_db_pool(threadsafety, 1) db = db_pool.connection() assert db.open_cursors == 0 cursor1 = db.cursor() assert cursor1 is not None assert db.open_cursors == 1 db.close() assert not hasattr(db, 'open_cursors') db = db_pool.connection() assert hasattr(db, 'database') assert db.database == 'SimplePooledDBTestDB' assert hasattr(db, 'user') assert db.user == 'SimplePooledDBTestUser' assert db.open_cursors == 1 cursor2 = db.cursor() assert cursor2 is not None assert db.open_cursors == 2 del cursor2 del cursor1 @pytest.mark.parametrize("threadsafety", [1, 2, 3]) def test_two_connections(threadsafety): db_pool = my_db_pool(threadsafety, 2) db1 = 
db_pool.connection() cursors1 = [db1.cursor() for _i in range(5)] db2 = db_pool.connection() assert db1 != db2 cursors2 = [db2.cursor() for _i in range(7)] assert db1.open_cursors == 5 assert db2.open_cursors == 7 db1.close() db1 = db_pool.connection() assert db1 != db2 assert hasattr(db1, 'cursor') for _i in range(3): cursors1.append(db1.cursor()) assert db1.open_cursors == 8 cursors2.append(db2.cursor()) assert db2.open_cursors == 8 del cursors2 del cursors1 def test_threadsafety_1(): db_pool = my_db_pool(1, 2) queue = Queue(3) def connection(): queue.put(db_pool.connection()) threads = [Thread(target=connection).start() for _i in range(3)] assert len(threads) == 3 db1 = queue.get(timeout=1) db2 = queue.get(timeout=1) assert db1 != db2 assert db1._con != db2._con with pytest.raises(Empty): queue.get(timeout=0.1) db2.close() db3 = queue.get(timeout=1) assert db1 != db3 assert db1._con != db3._con @pytest.mark.parametrize("threadsafety", [2, 3]) def test_threadsafety_2(threadsafety): dbpool = my_db_pool(threadsafety, 2) db1 = dbpool.connection() db2 = dbpool.connection() cursors = [dbpool.connection().cursor() for _i in range(100)] assert db1.open_cursors == 50 assert db2.open_cursors == 50 assert cursors del cursors WebwareForPython-DBUtils-ed2a1f2/tests/test_simple_pooled_pg.py000066400000000000000000000053771457556542700251030ustar00rootroot00000000000000"""Test the SimplePooledPg module. Note: We don't test performance here, so the test does not predicate whether SimplePooledPg actually will help in improving performance or not. Copyright and credit info: * This test was contributed by Christoph Zwerschke """ from queue import Empty, Queue from threading import Thread import pg # noqa: F401 import pytest from dbutils import simple_pooled_pg def my_db_pool(max_connections): """Get simple PooledPg connection.""" return simple_pooled_pg.PooledPg( max_connections, 'SimplePooledPgTestDB', 'SimplePooledPgTestUser') def test_version(): from dbutils import __version__ assert simple_pooled_pg.__version__ == __version__ assert simple_pooled_pg.PooledPg.version == __version__ def test_create_connection(): db_pool = my_db_pool(1) db = db_pool.connection() assert hasattr(db, 'query') assert hasattr(db, 'num_queries') assert db.num_queries == 0 assert hasattr(db, 'dbname') assert db.dbname == 'SimplePooledPgTestDB' assert hasattr(db, 'user') assert db.user == 'SimplePooledPgTestUser' db.query('select 1') assert db.num_queries == 1 def test_close_connection(): db_pool = my_db_pool(1) db = db_pool.connection() assert db.num_queries == 0 db.query('select 1') assert db.num_queries == 1 db.close() assert not hasattr(db, 'num_queries') db = db_pool.connection() assert hasattr(db, 'dbname') assert db.dbname == 'SimplePooledPgTestDB' assert hasattr(db, 'user') assert db.user == 'SimplePooledPgTestUser' assert db.num_queries == 1 db.query('select 1') assert db.num_queries == 2 def test_two_connections(): db_pool = my_db_pool(2) db1 = db_pool.connection() for _i in range(5): db1.query('select 1') db2 = db_pool.connection() assert db1 != db2 assert db1._con != db2._con for _i in range(7): db2.query('select 1') assert db1.num_queries == 5 assert db2.num_queries == 7 db1.close() db1 = db_pool.connection() assert db1 != db2 assert db1._con != db2._con assert hasattr(db1, 'query') for _i in range(3): db1.query('select 1') assert db1.num_queries == 8 db2.query('select 1') assert db2.num_queries == 8 def test_threads(): db_pool = my_db_pool(2) queue = Queue(3) def connection(): queue.put(db_pool.connection())
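    # Three threads compete for a pool of two connections; the third
    # connection() call blocks inside the pool until a connection is
    # returned, so only two items arrive in the queue at first.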
threads = [Thread(target=connection).start() for _i in range(3)] assert len(threads) == 3 db1 = queue.get(timeout=1) db2 = queue.get(timeout=1) assert db1 != db2 assert db1._con != db2._con with pytest.raises(Empty): queue.get(timeout=0.1) db2.close() db3 = queue.get(timeout=1) assert db1 != db3 assert db1._con != db3._con WebwareForPython-DBUtils-ed2a1f2/tests/test_steady_db.py000066400000000000000000000527431457556542700235210ustar00rootroot00000000000000"""Test the SteadyDB module. Note: We do not test any real DB-API 2 module, but we just mock the basic DB-API 2 connection functionality. Copyright and credit info: * This test was contributed by Christoph Zwerschke """ import pytest from dbutils.steady_db import SteadyDBConnection, SteadyDBCursor from dbutils.steady_db import connect as steady_db_connect from . import mock_db as dbapi def test_version(): from dbutils import __version__, steady_db assert steady_db.__version__ == __version__ assert steady_db.SteadyDBConnection.version == __version__ def test_mocked_connection(): db = dbapi.connect( 'SteadyDBTestDB', user='SteadyDBTestUser') db.__class__.has_ping = False db.__class__.num_pings = 0 assert hasattr(db, 'database') assert db.database == 'SteadyDBTestDB' assert hasattr(db, 'user') assert db.user == 'SteadyDBTestUser' assert hasattr(db, 'cursor') assert hasattr(db, 'close') assert hasattr(db, 'open_cursors') assert hasattr(db, 'num_uses') assert hasattr(db, 'num_queries') assert hasattr(db, 'session') assert hasattr(db, 'valid') assert db.valid assert db.open_cursors == 0 for _i in range(3): cursor = db.cursor() assert db.open_cursors == 1 cursor.close() assert db.open_cursors == 0 cursor = [] for i in range(3): cursor.append(db.cursor()) assert db.open_cursors == i + 1 del cursor assert db.open_cursors == 0 cursor = db.cursor() assert hasattr(cursor, 'execute') assert hasattr(cursor, 'fetchone') assert hasattr(cursor, 'callproc') assert hasattr(cursor, 'close') assert hasattr(cursor, 'valid') assert cursor.valid assert db.open_cursors == 1 for i in range(3): assert db.num_uses == i assert db.num_queries == i cursor.execute(f'select test{i}') assert cursor.fetchone() == f'test{i}' assert cursor.valid assert db.open_cursors == 1 for _i in range(4): cursor.callproc('test') cursor.close() assert not cursor.valid assert db.open_cursors == 0 assert db.num_uses == 7 assert db.num_queries == 3 with pytest.raises(dbapi.InternalError): cursor.close() with pytest.raises(dbapi.InternalError): cursor.execute('select test') assert db.valid assert not db.__class__.has_ping assert db.__class__.num_pings == 0 with pytest.raises(AttributeError): db.ping() assert db.__class__.num_pings == 1 db.__class__.has_ping = True assert db.ping() is None assert db.__class__.num_pings == 2 db.close() assert not db.valid assert db.num_uses == 0 assert db.num_queries == 0 with pytest.raises(dbapi.InternalError): db.close() with pytest.raises(dbapi.InternalError): db.cursor() with pytest.raises(dbapi.OperationalError): db.ping() assert db.__class__.num_pings == 3 db.__class__.has_ping = False db.__class__.num_pings = 0 def test_broken_connection(): with pytest.raises(TypeError): SteadyDBConnection(None) with pytest.raises(TypeError): SteadyDBCursor(None) db = steady_db_connect(dbapi, database='ok') for _i in range(3): db.close() del db with pytest.raises(dbapi.OperationalError): steady_db_connect(dbapi, database='error') db = steady_db_connect(dbapi, database='ok') cursor = db.cursor() for _i in range(3): cursor.close() cursor = db.cursor('ok') for _i in 

def test_version():
    from dbutils import __version__, steady_db
    assert steady_db.__version__ == __version__
    assert steady_db.SteadyDBConnection.version == __version__


def test_mocked_connection():
    db = dbapi.connect(
        'SteadyDBTestDB', user='SteadyDBTestUser')
    db.__class__.has_ping = False
    db.__class__.num_pings = 0
    assert hasattr(db, 'database')
    assert db.database == 'SteadyDBTestDB'
    assert hasattr(db, 'user')
    assert db.user == 'SteadyDBTestUser'
    assert hasattr(db, 'cursor')
    assert hasattr(db, 'close')
    assert hasattr(db, 'open_cursors')
    assert hasattr(db, 'num_uses')
    assert hasattr(db, 'num_queries')
    assert hasattr(db, 'session')
    assert hasattr(db, 'valid')
    assert db.valid
    assert db.open_cursors == 0
    for _i in range(3):
        cursor = db.cursor()
        assert db.open_cursors == 1
        cursor.close()
        assert db.open_cursors == 0
    cursor = []
    for i in range(3):
        cursor.append(db.cursor())
        assert db.open_cursors == i + 1
    del cursor
    assert db.open_cursors == 0
    cursor = db.cursor()
    assert hasattr(cursor, 'execute')
    assert hasattr(cursor, 'fetchone')
    assert hasattr(cursor, 'callproc')
    assert hasattr(cursor, 'close')
    assert hasattr(cursor, 'valid')
    assert cursor.valid
    assert db.open_cursors == 1
    for i in range(3):
        assert db.num_uses == i
        assert db.num_queries == i
        cursor.execute(f'select test{i}')
        assert cursor.fetchone() == f'test{i}'
    assert cursor.valid
    assert db.open_cursors == 1
    for _i in range(4):
        cursor.callproc('test')
    cursor.close()
    assert not cursor.valid
    assert db.open_cursors == 0
    assert db.num_uses == 7
    assert db.num_queries == 3
    with pytest.raises(dbapi.InternalError):
        cursor.close()
    with pytest.raises(dbapi.InternalError):
        cursor.execute('select test')
    assert db.valid
    assert not db.__class__.has_ping
    assert db.__class__.num_pings == 0
    with pytest.raises(AttributeError):
        db.ping()
    assert db.__class__.num_pings == 1
    db.__class__.has_ping = True
    assert db.ping() is None
    assert db.__class__.num_pings == 2
    db.close()
    assert not db.valid
    assert db.num_uses == 0
    assert db.num_queries == 0
    with pytest.raises(dbapi.InternalError):
        db.close()
    with pytest.raises(dbapi.InternalError):
        db.cursor()
    with pytest.raises(dbapi.OperationalError):
        db.ping()
    assert db.__class__.num_pings == 3
    db.__class__.has_ping = False
    db.__class__.num_pings = 0


def test_broken_connection():
    with pytest.raises(TypeError):
        SteadyDBConnection(None)
    with pytest.raises(TypeError):
        SteadyDBCursor(None)
    db = steady_db_connect(dbapi, database='ok')
    for _i in range(3):
        db.close()
    del db
    with pytest.raises(dbapi.OperationalError):
        steady_db_connect(dbapi, database='error')
    db = steady_db_connect(dbapi, database='ok')
    cursor = db.cursor()
    for _i in range(3):
        cursor.close()
    cursor = db.cursor('ok')
    for _i in range(3):
        cursor.close()
    with pytest.raises(dbapi.OperationalError):
        db.cursor('error')


@pytest.mark.parametrize("closeable", [False, True])
def test_close(closeable):
    db = steady_db_connect(dbapi, closeable=closeable)
    assert db._con.valid
    db.close()
    assert closeable ^ db._con.valid
    db.close()
    assert closeable ^ db._con.valid
    db._close()
    assert not db._con.valid
    db._close()
    assert not db._con.valid
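
# Editorial note (not part of the original suite): the XOR assertions above
# express the meaning of the closeable flag -- with closeable=False, close()
# is silently ignored and only the internal _close() really invalidates the
# underlying connection.  A minimal, hedged sketch:


def _closeable_sketch():
    db = steady_db_connect(dbapi, closeable=False)
    db.close()                  # ignored, the connection stays valid
    db.cursor().execute('select test')
    db._close()                 # really closes the underlying connection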

def test_connection():  # noqa: PLR0915
    db = steady_db_connect(
        dbapi, 0, None, None, None, True,
        'SteadyDBTestDB', user='SteadyDBTestUser')
    assert isinstance(db, SteadyDBConnection)
    assert hasattr(db, '_con')
    assert hasattr(db, '_usage')
    assert db._usage == 0
    assert hasattr(db._con, 'valid')
    assert db._con.valid
    assert hasattr(db._con, 'cursor')
    assert hasattr(db._con, 'close')
    assert hasattr(db._con, 'open_cursors')
    assert hasattr(db._con, 'num_uses')
    assert hasattr(db._con, 'num_queries')
    assert hasattr(db._con, 'session')
    assert hasattr(db._con, 'database')
    assert db._con.database == 'SteadyDBTestDB'
    assert hasattr(db._con, 'user')
    assert db._con.user == 'SteadyDBTestUser'
    assert hasattr(db, 'cursor')
    assert hasattr(db, 'close')
    assert db._con.open_cursors == 0
    for _i in range(3):
        cursor = db.cursor()
        assert db._con.open_cursors == 1
        cursor.close()
        assert db._con.open_cursors == 0
    cursor = []
    for i in range(3):
        cursor.append(db.cursor())
        assert db._con.open_cursors == i + 1
    del cursor
    assert db._con.open_cursors == 0
    cursor = db.cursor()
    assert hasattr(cursor, 'execute')
    assert hasattr(cursor, 'fetchone')
    assert hasattr(cursor, 'callproc')
    assert hasattr(cursor, 'close')
    assert hasattr(cursor, 'valid')
    assert cursor.valid
    assert db._con.open_cursors == 1
    for i in range(3):
        assert db._usage == i
        assert db._con.num_uses == i
        assert db._con.num_queries == i
        cursor.execute(f'select test{i}')
        assert cursor.fetchone() == f'test{i}'
    assert cursor.valid
    assert db._con.open_cursors == 1
    for _i in range(4):
        cursor.callproc('test')
    cursor.close()
    assert not cursor.valid
    assert db._con.open_cursors == 0
    assert db._usage == 7
    assert db._con.num_uses == 7
    assert db._con.num_queries == 3
    cursor.close()
    cursor.execute('select test8')
    assert cursor.valid
    assert db._con.open_cursors == 1
    assert cursor.fetchone() == 'test8'
    assert db._usage == 8
    assert db._con.num_uses == 8
    assert db._con.num_queries == 4
    assert db._con.valid
    db.close()
    assert not db._con.valid
    assert db._con.open_cursors == 0
    assert db._usage == 8
    assert db._con.num_uses == 0
    assert db._con.num_queries == 0
    with pytest.raises(dbapi.InternalError):
        db._con.close()
    db.close()
    with pytest.raises(dbapi.InternalError):
        db._con.cursor()
    cursor = db.cursor()
    assert db._con.valid
    cursor.execute('select test11')
    assert cursor.fetchone() == 'test11'
    cursor.execute('select test12')
    assert cursor.fetchone() == 'test12'
    cursor.callproc('test')
    assert db._usage == 3
    assert db._con.num_uses == 3
    assert db._con.num_queries == 2
    cursor2 = db.cursor()
    assert db._con.open_cursors == 2
    cursor2.execute('select test13')
    assert cursor2.fetchone() == 'test13'
    assert db._con.num_queries == 3
    db.close()
    assert db._con.open_cursors == 0
    assert db._con.num_queries == 0
    cursor = db.cursor()
    assert cursor.valid
    cursor.callproc('test')
    cursor._cursor.valid = False
    assert not cursor.valid
    with pytest.raises(dbapi.InternalError):
        cursor._cursor.callproc('test')
    cursor.callproc('test')
    assert cursor.valid
    cursor._cursor.callproc('test')
    assert db._usage == 2
    assert db._con.num_uses == 3
    db._con.valid = cursor._cursor.valid = False
    cursor.callproc('test')
    assert cursor.valid
    assert db._usage == 1
    assert db._con.num_uses == 1
    cursor.execute('set this')
    db.commit()
    cursor.execute('set that')
    db.rollback()
    assert db._con.session == ['this', 'commit', 'that', 'rollback']


def test_connection_context_handler():
    db = steady_db_connect(
        dbapi, 0, None, None, None, True,
        'SteadyDBTestDB', user='SteadyDBTestUser')
    assert db._con.session == []
    with db as con:
        con.cursor().execute('select test')
    assert db._con.session == ['commit']
    try:
        with db as con:
            con.cursor().execute('error')
    except dbapi.ProgrammingError:
        error = True
    else:
        error = False
    assert error
    assert db._con.session == ['commit', 'rollback']


def test_cursor_context_handler():
    db = steady_db_connect(
        dbapi, 0, None, None, None, True,
        'SteadyDBTestDB', user='SteadyDBTestUser')
    assert db._con.open_cursors == 0
    with db.cursor() as cursor:
        assert db._con.open_cursors == 1
        cursor.execute('select test')
        assert cursor.fetchone() == 'test'
    assert db._con.open_cursors == 0


def test_cursor_as_iterator_provided():
    db = steady_db_connect(
        dbapi, 0, None, None, None, True,
        'SteadyDBTestDB', user='SteadyDBTestUser')
    assert db._con.open_cursors == 0
    cursor = db.cursor()
    assert db._con.open_cursors == 1
    cursor.execute('select test')
    _cursor = cursor._cursor
    try:
        assert not hasattr(_cursor, 'iter')
        _cursor.__iter__ = lambda: ['test-iter']
        assert list(iter(cursor)) == ['test']
    finally:
        del _cursor.__iter__
    cursor.close()
    assert db._con.open_cursors == 0


def test_cursor_as_iterator_created():
    db = steady_db_connect(
        dbapi, 0, None, None, None, True,
        'SteadyDBTestDB', user='SteadyDBTestUser')
    assert db._con.open_cursors == 0
    cursor = db.cursor()
    assert db._con.open_cursors == 1
    cursor.execute('select test')
    assert list(iter(cursor)) == ['test']
    cursor.close()
    assert db._con.open_cursors == 0


def test_connection_creator_function():
    db1 = steady_db_connect(
        dbapi, 0, None, None, None, True,
        'SteadyDBTestDB', user='SteadyDBTestUser')
    db2 = steady_db_connect(
        dbapi.connect, 0, None, None, None, True,
        'SteadyDBTestDB', user='SteadyDBTestUser')
    assert db1.dbapi() == db2.dbapi()
    assert db1.threadsafety() == db2.threadsafety()
    assert db1._creator == db2._creator
    assert db1._args == db2._args
    assert db1._kwargs == db2._kwargs
    db2.close()
    db1.close()


def test_connection_maxusage():
    db = steady_db_connect(dbapi, 10)
    cursor = db.cursor()
    for i in range(100):
        cursor.execute(f'select test{i}')
        r = cursor.fetchone()
        assert r == f'test{i}'
        assert db._con.valid
        j = i % 10 + 1
        assert db._usage == j
        assert db._con.num_uses == j
        assert db._con.num_queries == j
    assert db._con.open_cursors == 1
    db.begin()
    for i in range(100):
        cursor.callproc('test')
        assert db._con.valid
        if i == 49:
            db.commit()
        j = i % 10 + 1 if i > 49 else i + 11
        assert db._usage == j
        assert db._con.num_uses == j
        j = 0 if i > 49 else 10
        assert db._con.num_queries == j
    for i in range(10):
        if i == 7:
            db._con.valid = cursor._cursor.valid = False
        cursor.execute(f'select test{i}')
        r = cursor.fetchone()
        assert r == f'test{i}'
        j = i % 7 + 1
        assert db._usage == j
        assert db._con.num_uses == j
        assert db._con.num_queries == j
    for i in range(10):
        if i == 5:
            db._con.valid = cursor._cursor.valid = False
        cursor.callproc('test')
        j = (i + (3 if i < 5 else -5)) % 10 + 1
        assert db._usage == j
        assert db._con.num_uses == j
        j = 3 if i < 5 else 0
        assert db._con.num_queries == j
    db.close()
    cursor.execute('select test1')
    assert cursor.fetchone() == 'test1'
    assert db._usage == 1
    assert db._con.num_uses == 1
    assert db._con.num_queries == 1
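
# Editorial note (not part of the original suite): the modulo arithmetic
# above mirrors the maxusage parameter (10 here) -- once the usage counter
# reaches that limit, SteadyDB transparently renews the underlying
# connection, with renewal delayed while a transaction started via begin()
# is pending.  A minimal, hedged sketch:


def _maxusage_sketch():
    db = steady_db_connect(dbapi, 10)  # renew after at most 10 uses
    cursor = db.cursor()
    for i in range(25):
        cursor.execute(f'select test{i % 10}')
        cursor.fetchone()
    assert db._usage <= 10             # the counter wrapped around twice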

def test_connection_setsession():
    db = steady_db_connect(dbapi, 3, ('set time zone', 'set datestyle'))
    assert hasattr(db, '_usage')
    assert db._usage == 0
    assert hasattr(db._con, 'open_cursors')
    assert db._con.open_cursors == 0
    assert hasattr(db._con, 'num_uses')
    assert db._con.num_uses == 2
    assert hasattr(db._con, 'num_queries')
    assert db._con.num_queries == 0
    assert hasattr(db._con, 'session')
    assert tuple(db._con.session) == ('time zone', 'datestyle')
    for _i in range(11):
        db.cursor().execute('select test')
    assert db._con.open_cursors == 0
    assert db._usage == 2
    assert db._con.num_uses == 4
    assert db._con.num_queries == 2
    assert db._con.session == ['time zone', 'datestyle']
    db.cursor().execute('set test')
    assert db._con.open_cursors == 0
    assert db._usage == 3
    assert db._con.num_uses == 5
    assert db._con.num_queries == 2
    assert db._con.session == ['time zone', 'datestyle', 'test']
    db.cursor().execute('select test')
    assert db._con.open_cursors == 0
    assert db._usage == 1
    assert db._con.num_uses == 3
    assert db._con.num_queries == 1
    assert db._con.session == ['time zone', 'datestyle']
    db.cursor().execute('set test')
    assert db._con.open_cursors == 0
    assert db._usage == 2
    assert db._con.num_uses == 4
    assert db._con.num_queries == 1
    assert db._con.session == ['time zone', 'datestyle', 'test']
    db.cursor().execute('select test')
    assert db._con.open_cursors == 0
    assert db._usage == 3
    assert db._con.num_uses == 5
    assert db._con.num_queries == 2
    assert db._con.session == ['time zone', 'datestyle', 'test']
    db.close()
    db.cursor().execute('set test')
    assert db._con.open_cursors == 0
    assert db._usage == 1
    assert db._con.num_uses == 3
    assert db._con.num_queries == 0
    assert db._con.session == ['time zone', 'datestyle', 'test']
    db.close()
    db.cursor().execute('select test')
    assert db._con.open_cursors == 0
    assert db._usage == 1
    assert db._con.num_uses == 3
    assert db._con.num_queries == 1
    assert db._con.session == ['time zone', 'datestyle']


def test_connection_failures():
    db = steady_db_connect(dbapi)
    db.close()
    db.cursor()
    db = steady_db_connect(dbapi, failures=dbapi.InternalError)
    db.close()
    db.cursor()
    db = steady_db_connect(dbapi, failures=dbapi.OperationalError)
    db.close()
    with pytest.raises(dbapi.InternalError):
        db.cursor()
    db = steady_db_connect(dbapi, failures=(
        dbapi.OperationalError, dbapi.InterfaceError))
    db.close()
    with pytest.raises(dbapi.InternalError):
        db.cursor()
    db = steady_db_connect(dbapi, failures=(
        dbapi.OperationalError, dbapi.InterfaceError, dbapi.InternalError))
    db.close()
    db.cursor()
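
# Editorial note (not part of the original suite): the failures argument
# names the exception class(es) that trigger SteadyDB's transparent
# failover; the default is assumed to cover OperationalError,
# InterfaceError and InternalError, which is why the narrower tuples above
# let the mock's InternalError escape unhandled.  A minimal, hedged sketch:


def _failures_sketch():
    db = steady_db_connect(dbapi, failures=dbapi.InternalError)
    db.close()
    db.cursor()  # InternalError from the closed mock triggers a reconnect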

def test_connection_failure_error():
    db = steady_db_connect(dbapi)
    cursor = db.cursor()
    db.close()
    cursor.execute('select test')
    cursor = db.cursor()
    db.close()
    with pytest.raises(dbapi.ProgrammingError):
        cursor.execute('error')


def test_connection_set_sizes():
    db = steady_db_connect(dbapi)
    cursor = db.cursor()
    cursor.execute('get sizes')
    result = cursor.fetchone()
    assert result == ([], {})
    cursor.setinputsizes([7, 42, 6])
    cursor.setoutputsize(9)
    cursor.setoutputsize(15, 3)
    cursor.setoutputsize(42, 7)
    cursor.execute('get sizes')
    result = cursor.fetchone()
    assert result == ([7, 42, 6], {None: 9, 3: 15, 7: 42})
    cursor.execute('get sizes')
    result = cursor.fetchone()
    assert result == ([], {})
    cursor.setinputsizes([6, 42, 7])
    cursor.setoutputsize(7)
    cursor.setoutputsize(15, 3)
    cursor.setoutputsize(42, 9)
    db.close()
    cursor.execute('get sizes')
    result = cursor.fetchone()
    assert result == ([6, 42, 7], {None: 7, 3: 15, 9: 42})


def test_connection_ping_check():
    con_cls = dbapi.Connection
    con_cls.has_ping = False
    con_cls.num_pings = 0
    db = steady_db_connect(dbapi)
    db.cursor().execute('select test')
    assert con_cls.num_pings == 0
    db.close()
    db.cursor().execute('select test')
    assert con_cls.num_pings == 0
    assert db._ping_check() is None
    assert con_cls.num_pings == 1
    db = steady_db_connect(dbapi, ping=7)
    db.cursor().execute('select test')
    assert con_cls.num_pings == 2
    db.close()
    db.cursor().execute('select test')
    assert con_cls.num_pings == 2
    assert db._ping_check() is None
    assert con_cls.num_pings == 2
    con_cls.has_ping = True
    db = steady_db_connect(dbapi)
    db.cursor().execute('select test')
    assert con_cls.num_pings == 2
    db.close()
    db.cursor().execute('select test')
    assert con_cls.num_pings == 2
    assert db._ping_check()
    assert con_cls.num_pings == 3
    db = steady_db_connect(dbapi, ping=1)
    db.cursor().execute('select test')
    assert con_cls.num_pings == 3
    db.close()
    db.cursor().execute('select test')
    assert con_cls.num_pings == 3
    assert db._ping_check()
    assert con_cls.num_pings == 4
    db.close()
    assert db._ping_check()
    assert con_cls.num_pings == 5
    db = steady_db_connect(dbapi, ping=7)
    db.cursor().execute('select test')
    assert con_cls.num_pings == 7
    db.close()
    db.cursor().execute('select test')
    assert con_cls.num_pings == 9
    db = steady_db_connect(dbapi, ping=3)
    assert con_cls.num_pings == 9
    db.cursor()
    assert con_cls.num_pings == 10
    db.close()
    cursor = db.cursor()
    assert con_cls.num_pings == 11
    cursor.execute('select test')
    assert con_cls.num_pings == 11
    db = steady_db_connect(dbapi, ping=5)
    assert con_cls.num_pings == 11
    db.cursor()
    assert con_cls.num_pings == 11
    db.close()
    cursor = db.cursor()
    assert con_cls.num_pings == 11
    cursor.execute('select test')
    assert con_cls.num_pings == 12
    db.close()
    cursor = db.cursor()
    assert con_cls.num_pings == 12
    cursor.execute('select test')
    assert con_cls.num_pings == 13
    db = steady_db_connect(dbapi, ping=7)
    assert con_cls.num_pings == 13
    db.cursor()
    assert con_cls.num_pings == 14
    db.close()
    cursor = db.cursor()
    assert con_cls.num_pings == 15
    cursor.execute('select test')
    assert con_cls.num_pings == 16
    db.close()
    cursor = db.cursor()
    assert con_cls.num_pings == 17
    cursor.execute('select test')
    assert con_cls.num_pings == 18
    db.close()
    cursor.execute('select test')
    assert con_cls.num_pings == 20
    con_cls.has_ping = False
    con_cls.num_pings = 0
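
# Editorial note (not part of the original suite): the ping counts above are
# assumed to follow the bit flags of the ping argument -- 1 = when explicitly
# checked, 2 = when a cursor is created, 4 = when a query is executed, with
# combinations such as 7 meaning all of these.  A minimal, hedged sketch:


def _ping_sketch():
    db = steady_db_connect(dbapi, ping=2)  # verify the connection per cursor
    db.cursor()       # triggers a ping on the mocked connection
    db._ping_check()  # explicit check, used internally by SteadyDB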

def test_begin_transaction():
    db = steady_db_connect(dbapi, database='ok')
    cursor = db.cursor()
    cursor.close()
    cursor.execute('select test12')
    assert cursor.fetchone() == 'test12'
    db.begin()
    cursor = db.cursor()
    cursor.close()
    with pytest.raises(dbapi.InternalError):
        cursor.execute('select test12')
    cursor.execute('select test12')
    assert cursor.fetchone() == 'test12'
    db.close()
    db.begin()
    with pytest.raises(dbapi.InternalError):
        cursor.execute('select test12')
    cursor.execute('select test12')
    assert cursor.fetchone() == 'test12'
    db.begin()
    with pytest.raises(dbapi.ProgrammingError):
        cursor.execute('error')
    cursor.close()
    cursor.execute('select test12')
    assert cursor.fetchone() == 'test12'


def test_with_begin_extension():
    db = steady_db_connect(dbapi, database='ok')
    db._con._begin_called_with = None

    def begin(a, b=None, c=7):
        db._con._begin_called_with = (a, b, c)

    db._con.begin = begin
    db.begin(42, 6)
    cursor = db.cursor()
    cursor.execute('select test13')
    assert cursor.fetchone() == 'test13'
    assert db._con._begin_called_with == (42, 6, 7)


def test_cancel_transaction():
    db = steady_db_connect(dbapi, database='ok')
    cursor = db.cursor()
    db.begin()
    cursor.execute('select test14')
    assert cursor.fetchone() == 'test14'
    db.cancel()
    cursor.execute('select test14')
    assert cursor.fetchone() == 'test14'


def test_with_cancel_extension():
    db = steady_db_connect(dbapi, database='ok')
    db._con._cancel_called = None

    def cancel():
        db._con._cancel_called = 'yes'

    db._con.cancel = cancel
    db.begin()
    cursor = db.cursor()
    cursor.execute('select test15')
    assert cursor.fetchone() == 'test15'
    db.cancel()
    assert db._con._cancel_called == 'yes'


def test_reset_transaction():
    db = steady_db_connect(dbapi, database='ok')
    db.begin()
    assert not db._con.session
    db.close()
    assert not db._con.session
    db = steady_db_connect(dbapi, database='ok', closeable=False)
    db.begin()
    assert not db._con.session
    db.close()
    assert db._con.session == ['rollback']


def test_commit_error():
    db = steady_db_connect(dbapi, database='ok')
    db.begin()
    assert not db._con.session
    assert db._con.valid
    db.commit()
    assert db._con.session == ['commit']
    assert db._con.valid
    db.begin()
    db._con.valid = False
    con = db._con
    with pytest.raises(dbapi.InternalError):
        db.commit()
    assert not db._con.session
    assert db._con.valid
    assert con is not db._con
    db.begin()
    assert not db._con.session
    assert db._con.valid
    db.commit()
    assert db._con.session == ['commit']
    assert db._con.valid


def test_rollback_error():
    db = steady_db_connect(dbapi, database='ok')
    db.begin()
    assert not db._con.session
    assert db._con.valid
    db.rollback()
    assert db._con.session == ['rollback']
    assert db._con.valid
    db.begin()
    db._con.valid = False
    con = db._con
    with pytest.raises(dbapi.InternalError):
        db.rollback()
    assert not db._con.session
    assert db._con.valid
    assert con is not db._con
    db.begin()
    assert not db._con.session
    assert db._con.valid
    db.rollback()
    assert db._con.session == ['rollback']
    assert db._con.valid

WebwareForPython-DBUtils-ed2a1f2/tests/test_steady_pg.py000066400000000000000000000231531457556542700235330ustar00rootroot00000000000000
"""Test the SteadyPg module.

Note: We do not test the real PyGreSQL module, but we just
mock the basic connection functionality of that module.
We assume that the PyGreSQL module will detect lost connections
correctly and set the status flag accordingly.

Copyright and credit info:

* This test was contributed by Christoph Zwerschke
"""

import sys

import pg
import pytest

from dbutils.steady_pg import SteadyPgConnection
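
# Editorial sketch (not part of the original suite): the tests below drive
# SteadyPgConnection against the mocked pg.DB class, whose db.status flag
# stands in for PyGreSQL's detection of lost connections.  A minimal, hedged
# usage sketch under these assumptions:


def _steady_pg_sketch():
    db = SteadyPgConnection(
        maxusage=10, dbname='SteadyPgTestDB', user='SteadyPgTestUser')
    db.query('select 1')  # transparently reopened if the backend was lost
    db.close()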

def test_version():
    from dbutils import __version__, steady_pg
    assert steady_pg.__version__ == __version__
    assert steady_pg.SteadyPgConnection.version == __version__


def test_mocked_connection():
    db_cls = pg.DB
    db = db_cls(
        'SteadyPgTestDB', user='SteadyPgTestUser')
    assert hasattr(db, 'db')
    assert hasattr(db.db, 'status')
    assert db.db.status
    assert hasattr(db.db, 'query')
    assert hasattr(db.db, 'close')
    assert not hasattr(db.db, 'reopen')
    assert hasattr(db, 'reset')
    assert hasattr(db.db, 'num_queries')
    assert hasattr(db.db, 'session')
    assert not hasattr(db.db, 'get_tables')
    assert hasattr(db.db, 'db')
    assert db.db.db == 'SteadyPgTestDB'
    assert hasattr(db.db, 'user')
    assert db.db.user == 'SteadyPgTestUser'
    assert hasattr(db, 'query')
    assert hasattr(db, 'close')
    assert hasattr(db, 'reopen')
    assert hasattr(db, 'reset')
    assert hasattr(db, 'num_queries')
    assert hasattr(db, 'session')
    assert hasattr(db, 'get_tables')
    assert hasattr(db, 'dbname')
    assert db.dbname == 'SteadyPgTestDB'
    assert hasattr(db, 'user')
    assert db.user == 'SteadyPgTestUser'
    for i in range(3):
        assert db.num_queries == i
        assert db.query(f'select test{i}') == f'test{i}'
    assert db.db.status
    db.reopen()
    assert db.db.status
    assert db.num_queries == 0
    assert db.query('select test4') == 'test4'
    assert db.get_tables() == 'test'
    db.close()
    try:
        status = db.db.status
    except AttributeError:
        status = False
    assert not status
    with pytest.raises(pg.InternalError):
        db.close()
    with pytest.raises(pg.InternalError):
        db.query('select test')
    with pytest.raises(pg.InternalError):
        db.get_tables()


def test_broken_connection():
    with pytest.raises(TypeError):
        SteadyPgConnection('wrong')
    db = SteadyPgConnection(dbname='ok')
    internal_error_cls = sys.modules[db._con.__module__].InternalError
    for _i in range(3):
        db.close()
    del db
    with pytest.raises(internal_error_cls):
        SteadyPgConnection(dbname='error')


@pytest.mark.parametrize("closeable", [False, True])
def test_close(closeable):
    db = SteadyPgConnection(closeable=closeable)
    assert db._con.db
    assert db._con.valid is True
    db.close()
    assert closeable ^ (db._con.db is not None and db._con.valid)
    db.close()
    assert closeable ^ (db._con.db is not None and db._con.valid)
    db._close()
    assert not db._con.db
    db._close()
    assert not db._con.db


def test_connection():
    db = SteadyPgConnection(
        0, None, 1, 'SteadyPgTestDB', user='SteadyPgTestUser')
    assert hasattr(db, 'db')
    assert hasattr(db, '_con')
    assert db.db == db._con.db
    assert hasattr(db, '_usage')
    assert db._usage == 0
    assert hasattr(db.db, 'status')
    assert db.db.status
    assert hasattr(db.db, 'query')
    assert hasattr(db.db, 'close')
    assert not hasattr(db.db, 'reopen')
    assert hasattr(db.db, 'reset')
    assert hasattr(db.db, 'num_queries')
    assert hasattr(db.db, 'session')
    assert hasattr(db.db, 'db')
    assert db.db.db == 'SteadyPgTestDB'
    assert hasattr(db.db, 'user')
    assert db.db.user == 'SteadyPgTestUser'
    assert not hasattr(db.db, 'get_tables')
    assert hasattr(db, 'query')
    assert hasattr(db, 'close')
    assert hasattr(db, 'reopen')
    assert hasattr(db, 'reset')
    assert hasattr(db, 'num_queries')
    assert hasattr(db, 'session')
    assert hasattr(db, 'dbname')
    assert db.dbname == 'SteadyPgTestDB'
    assert hasattr(db, 'user')
    assert db.user == 'SteadyPgTestUser'
    assert hasattr(db, 'get_tables')
    for i in range(3):
        assert db._usage == i
        assert db.num_queries == i
        assert db.query(f'select test{i}') == f'test{i}'
        assert db.db.status
    assert db.get_tables() == 'test'
    assert db.db.status
    assert db._usage == 4
    assert db.num_queries == 3
    db.reopen()
    assert db.db.status
    assert db._usage == 0
    assert db.num_queries == 0
    assert db.query('select test') == 'test'
    assert db.db.status
    assert hasattr(db._con, 'status')
    assert db._con.status
    assert hasattr(db._con, 'close')
    assert hasattr(db._con, 'query')
    db.close()
    try:
        status = db.db.status
    except AttributeError:
        status = False
    assert not status
    assert hasattr(db._con, 'close')
    assert hasattr(db._con, 'query')
    internal_error_cls = sys.modules[db._con.__module__].InternalError
    with pytest.raises(internal_error_cls):
        db._con.close()
    with pytest.raises(internal_error_cls):
        db._con.query('select test')
    assert db.query('select test') == 'test'
    assert db.db.status
    assert db._usage == 1
    assert db.num_queries == 1
    db.db.status = False
    assert not db.db.status
    assert db.query('select test') == 'test'
    assert db.db.status
    assert db._usage == 1
    assert db.num_queries == 1
    db.db.status = False
    assert not db.db.status
    assert db.get_tables() == 'test'
    assert db.db.status
    assert db._usage == 1
    assert db.num_queries == 0


def test_connection_context_handler():
    db = SteadyPgConnection(
        0, None, 1, 'SteadyPgTestDB', user='SteadyPgTestUser')
    assert db.session == []
    with db:
        db.query('select test')
    assert db.session == ['begin', 'commit']
    try:
        with db:
            db.query('error')
    except pg.ProgrammingError:
        error = True
    else:
        error = False
    assert error
    assert db._con.session == ['begin', 'commit', 'begin', 'rollback']


def test_connection_maxusage():
    db = SteadyPgConnection(10)
    for i in range(100):
        r = db.query(f'select test{i}')
        assert r == f'test{i}'
        assert db.db.status
        j = i % 10 + 1
        assert db._usage == j
        assert db.num_queries == j
    db.begin()
    for i in range(100):
        r = db.get_tables()
        assert r == 'test'
        assert db.db.status
        if i == 49:
            db.commit()
        j = i % 10 + 1 if i > 49 else i + 11
        assert db._usage == j
        j = 0 if i > 49 else 10
        assert db.num_queries == j
    for i in range(10):
        if i == 7:
            db.db.status = False
        r = db.query(f'select test{i}')
        assert r == f'test{i}'
        j = i % 7 + 1
        assert db._usage == j
        assert db.num_queries == j
    for i in range(10):
        if i == 5:
            db.db.status = False
        r = db.get_tables()
        assert r == 'test'
        j = (i + (3 if i < 5 else -5)) % 10 + 1
        assert db._usage == j
        j = 3 if i < 5 else 0
        assert db.num_queries == j
    db.close()
    assert db.query('select test1') == 'test1'
    assert db._usage == 1
    assert db.num_queries == 1
    db.reopen()
    assert db._usage == 0
    assert db.num_queries == 0
    assert db.query('select test2') == 'test2'
    assert db._usage == 1
    assert db.num_queries == 1


def test_connection_setsession():
    db = SteadyPgConnection(3, ('set time zone', 'set datestyle'))
    assert hasattr(db, 'num_queries')
    assert db.num_queries == 0
    assert hasattr(db, 'session')
    assert tuple(db.session) == ('time zone', 'datestyle')
    for _i in range(11):
        db.query('select test')
    assert db.num_queries == 2
    assert db.session == ['time zone', 'datestyle']
    db.query('set test')
    assert db.num_queries == 2
    assert db.session == ['time zone', 'datestyle', 'test']
    db.query('select test')
    assert db.num_queries == 1
    assert db.session == ['time zone', 'datestyle']
    db.close()
    db.query('set test')
    assert db.num_queries == 0
    assert db.session == ['time zone', 'datestyle', 'test']
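
# Editorial note (not part of the original suite): as the counters above
# show, the setsession commands are replayed whenever the connection is
# renewed (after maxusage is exhausted or after a close), so session state
# survives transparent reconnects.  A minimal, hedged sketch:


def _setsession_sketch():
    db = SteadyPgConnection(3, ('set datestyle',))
    db.query('select 1')
    db.close()            # on the next use the connection is reopened
    db.query('select 1')  # and 'set datestyle' is executed again first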

@pytest.mark.parametrize("closeable", [False, True])
def test_begin(closeable):
    db = SteadyPgConnection(closeable=closeable)
    db.begin()
    assert db.session == ['begin']
    db.query('select test')
    assert db.num_queries == 1
    db.close()
    db.query('select test')
    assert db.num_queries == 1
    db.begin()
    assert db.session == ['begin']
    db.db.close()
    with pytest.raises(pg.InternalError):
        db.query('select test')
    assert db.num_queries == 0
    db.query('select test')
    assert db.num_queries == 1
    assert db.begin('select sql:begin') == 'sql:begin'
    assert db.num_queries == 2


@pytest.mark.parametrize("closeable", [False, True])
def test_end(closeable):
    db = SteadyPgConnection(closeable=closeable)
    db.begin()
    db.query('select test')
    db.end()
    assert db.session == ['begin', 'end']
    db.db.close()
    db.query('select test')
    assert db.num_queries == 1
    assert db.begin('select sql:end') == 'sql:end'
    assert db.num_queries == 2
    db.begin()
    db.query('select test')
    db.commit()
    assert db.session == ['begin', 'commit']
    db.db.close()
    db.query('select test')
    assert db.num_queries == 1
    assert db.begin('select sql:commit') == 'sql:commit'
    assert db.num_queries == 2
    db.begin()
    db.query('select test')
    db.rollback()
    assert db.session == ['begin', 'rollback']
    db.db.close()
    db.query('select test')
    assert db.num_queries == 1
    assert db.begin('select sql:rollback') == 'sql:rollback'
    assert db.num_queries == 2

WebwareForPython-DBUtils-ed2a1f2/tests/test_threading_local.py000066400000000000000000000034661457556542700246770ustar00rootroot00000000000000
"""Test the ThreadingLocal module."""

from threading import Thread

from dbutils.persistent_db import local


def test_getattr():
    my_data = local()
    my_data.number = 42
    assert my_data.number == 42


def test_dict():
    my_data = local()
    my_data.number = 42
    assert my_data.__dict__ == {'number': 42}
    my_data.__dict__.setdefault('widgets', [])
    assert my_data.widgets == []


def test_threadlocal():
    def f():
        items = sorted(my_data.__dict__.items())
        log.append(items)
        my_data.number = 11
        log.append(my_data.number)

    my_data = local()
    my_data.number = 42
    log = []
    thread = Thread(target=f)
    thread.start()
    thread.join()
    assert log == [[], 11]
    assert my_data.number == 42


def test_subclass():

    class MyLocal(local):
        number = 2
        initialized = 0

        def __init__(self, **kw):
            if self.initialized:
                raise SystemError
            self.initialized = 1
            self.__dict__.update(kw)

        def squared(self):
            return self.number ** 2

    my_data = MyLocal(color='red')
    assert my_data.number == 2
    assert my_data.color == 'red'
    del my_data.color
    assert my_data.squared() == 4

    def f():
        items = sorted(my_data.__dict__.items())
        log.append(items)
        my_data.number = 7
        log.append(my_data.number)

    log = []
    thread = Thread(target=f)
    thread.start()
    thread.join()
    assert log == [[('color', 'red'), ('initialized', 1)], 7]
    assert my_data.number == 2
    assert not hasattr(my_data, 'color')

    class MyLocal(local):
        __slots__ = ('number',)

    my_data = MyLocal()
    my_data.number = 42
    my_data.color = 'red'
    thread = Thread(target=f)
    thread.start()
    thread.join()
    assert my_data.number == 7

WebwareForPython-DBUtils-ed2a1f2/tox.ini000066400000000000000000000010161457556542700203060ustar00rootroot00000000000000
[tox]
envlist = py3{7,8,9,10,11,12}, ruff, manifest, docs, spell

[testenv]
setenv =
    PYTHONPATH = {toxinidir}
extras = tests
commands =
    pytest {posargs}

[testenv:spell]
basepython = python3.11
deps = codespell
commands =
    codespell .

[testenv:ruff]
basepython = python3.11
deps = ruff
commands =
    ruff check .

[testenv:manifest]
basepython = python3.11
deps = check-manifest
commands =
    check-manifest -v

[testenv:docs]
basepython = python3.11
extras = docs
changedir = docs
commands =
    python make.py
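
# Editorial note (not from the original archive): a hedged usage example for
# this configuration, assuming the listed interpreters are available --
#
#   pip install tox
#   tox -e py311        # run the test suite on Python 3.11 only
#   tox -e ruff,docs    # run selected auxiliary environments
#   tox                 # run the complete envlist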