pax_global_header00006660000000000000000000000064135516537620014527gustar00rootroot0000000000000052 comment=bcae0a58177476c395c73d343c7d6f4320ec594c logbook-1.5.3/000077500000000000000000000000001355165376200131715ustar00rootroot00000000000000logbook-1.5.3/.appveyor/000077500000000000000000000000001355165376200151145ustar00rootroot00000000000000logbook-1.5.3/.appveyor/after_test.bat000066400000000000000000000002471355165376200177470ustar00rootroot00000000000000IF DEFINED CYBUILD ( %BUILD% python setup.py bdist_wheel IF "%APPVEYOR_REPO_TAG%"=="true" ( twine upload -u %PYPI_USERNAME% -p %PYPI_PASSWORD% dist\*.whl ) )logbook-1.5.3/.appveyor/build.cmd000066400000000000000000000015051355165376200167010ustar00rootroot00000000000000@echo off :: To build extensions for 64 bit Python 3, we need to configure environment :: variables to use the MSVC 2010 C++ compilers from GRMSDKX_EN_DVD.iso of: :: MS Windows SDK for Windows 7 and .NET Framework 4 :: :: More details at: :: https://github.com/cython/cython/wiki/64BitCythonExtensionsOnWindows IF "%DISTUTILS_USE_SDK%"=="1" ( ECHO Configuring environment to build with MSVC on a 64bit architecture ECHO Using Windows SDK 7.1 "C:\Program Files\Microsoft SDKs\Windows\v7.1\Setup\WindowsSdkVer.exe" -q -version:v7.1 CALL "C:\Program Files\Microsoft SDKs\Windows\v7.1\Bin\SetEnv.cmd" /x64 /release SET MSSdk=1 REM Need the following to allow tox to see the SDK compiler SET TOX_TESTENV_PASSENV=DISTUTILS_USE_SDK MSSdk INCLUDE LIB ) ELSE ( ECHO Using default MSVC build environment ) CALL %*logbook-1.5.3/.appveyor/prepare.bat000066400000000000000000000013101355165376200172350ustar00rootroot00000000000000pip install -U wheel setuptools || goto :error nuget install redis-64 -excludeversion || goto :error redis-64\tools\redis-server.exe --service-install || goto :error redis-64\tools\redis-server.exe --service-start || goto :error IF NOT DEFINED SKIPZMQ ( nuget install ZeroMQ || goto :error ) IF DEFINED CYBUILD ( %BUILD% pip install cython twine 
|| goto :error cython logbook\_speedups.pyx || goto :error ) ELSE ( set DISABLE_LOGBOOK_CEXT=True ) IF DEFINED SKIPZMQ ( %BUILD% pip install -e .[dev,execnet,jinja,sqlalchemy,redis] || goto :error ) ELSE ( %BUILD% pip install -e .[all] || goto :error ) REM pypiwin32 can fail, ignore error. %BUILD% pip install pypiwin32 exit /b 0 :error exit /b %errorlevel% logbook-1.5.3/.gitignore000066400000000000000000000015131355165376200151610ustar00rootroot00000000000000# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python env/ build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ *.egg-info/ .installed.cfg *.egg # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *,cover .hypothesis/ # Translations *.mo *.pot # Django stuff: *.log # Sphinx documentation docs/_build/ # PyBuilder target/ # Logbook specific / custom ignores .ropeproject logbook/_speedups.c env* .vagrant flycheck-* .idea .python-version logbook-1.5.3/.hgignore000066400000000000000000000000551355165376200147740ustar00rootroot00000000000000\.pyc$ \.egg-info$ docs/_build \.ropeproject logbook-1.5.3/.travis.yml000066400000000000000000000024221355165376200153020ustar00rootroot00000000000000language: python dist: xenial addons: apt: sources: - chris-lea-redis-server - sourceline: 'ppa:chris-lea/zeromq' packages: - redis-server - libzmq3-dev services: - redis-server python: - '2.7' - '3.5' - '3.6' - '3.7' before_install: - pip install coveralls install: - pip install -U pip - pip install cython - cython logbook/_speedups.pyx env: - DISABLE_LOGBOOK_CEXT=True - CYBUILD=True script: - pip install -e .[all] - if [[ $GEVENT == 'True' ]] ; 
then pip install gevent; fi - pytest --cov=logbook -r s tests matrix: exclude: include: - python: "3.6" env: GEVENT=True CYBUILD=True - python: "2.7" env: GEVENT=True CYBUILD=True after_success: - coveralls notifications: email: recipients: - vmalloc@gmail.com irc: channels: - chat.freenode.net#pocoo on_success: change on_failure: always use_notice: true skip_join: true deploy: - provider: pypi user: vmalloc password: secure: WFmuAbtBDIkeZArIFQRCwyO1TdvF2PaZpo75r3mFgnY+aWm75cdgjZKoNqVprF/f+v9EsX2kDdQ7ZfuhMLgP8MNziB+ty7579ZDGwh64jGoi+DIoeblAFu5xNAqjvhie540uCE8KySk9s+Pq5EpOA5w18V4zxTw+h6tnBQ0M9cQ= on: python: "3.7" condition: $CYBUILD = 'True' tags: true repo: getlogbook/logbook distributions: "sdist" logbook-1.5.3/AUTHORS000066400000000000000000000005451355165376200142450ustar00rootroot00000000000000Logbook is written and maintained by the Logbook Team and various contributors: Lead Developers: - Armin Ronacher - Georg Brandl Contributors: - Ronny Pfannschmidt - Daniel Neuhäuser - Kenneth Reitz - Valentine Svensson - Roman Valls Guimera - Guillermo Carrasco Hernández - Raphaël Vinot - Rotem Yaari - Frazer McLean logbook-1.5.3/CHANGES000066400000000000000000000252141355165376200141700ustar00rootroot00000000000000Logbook Changelog ================= Version 1.5.1 ------------- Released on August 20th, 2019 - Added support for asyncio and contextvars Version 1.4.3 ------------- Released on January 16th, 2019 - Fixed Pypi release for 1.4.2 Version 1.4.2 ------------- Released on December 11th, 2018 - Use correct record delimiters (null for UNIX, newline for network) in SyslogHandler (thanks Jonathan Kamens) - Try to reconnect to SyslogHandler TCP sockets when they are disconnected (thanks Jonathan Kamens) - Use RFC 5424 format for networking logging in SyslogHandler (thanks Jonathan Kamens) Here you can see the full list of changes between each Logbook release. 
Version 1.4.1 ------------- Released on October 14th, 2018 - Fixed deprecated regular expression pattern (thanks Tsuyoshi Hombashi) - Fixed TimedRotatingFileHandler rotation (thanks Tucker Beck) Version 1.4.0 ------------- Released on May 15th, 2018 - Added support for checking if trace logs have been emitted in TestHandler (thanks @thedrow) Version 1.3.0 ------------- Released on March 5th, 2018 - Added support for controlling rotating file names -- Logbook now allows users to customize the formatting of rollover/rotating files (thanks Tucker Beck) Version 1.2.0 ------------- Released on February 8th, 2018 - Added support for compressed log files, supporting both gzip and brotli compression methods (thanks Maor Marcus) - Fixed CPU usage for queuing handlers (thanks Adam Urbańczyk) Version 1.1.0 ------------- Released on July 13th 2017 - Added a handler for Riemann (thanks Šarūnas Navickas) - Added a handler for Slack (thanks @jonathanng) - Colorizing mixin can now force coloring on or off (thanks @ayalash) Version 1.0.1 ------------- - Fix PushOver handler cropping (thanks Sébastien Celles) VERSION 1.0.0 ------------- Released on June 26th 2016 - Added support for timezones for log timestamp formatting (thanks Mattijs Ugen) - Logbook has been a 0.x long enough to earn its 1.0.0 bump! - Logbook now uses SemVer for its versioning scheme - Various improvements to MailHandler and the usage of TLS/SMTP SSL (thanks Frazer McLean) - Fix log colorizing on Windows (thanks Frazer McLean) - Coverage reports using coveralls.io - Dropped compatibility for Python 3.2. At this point we did not actually remove any code that supports it, but the continuous integration tests no longer check against it, and we will no longer fix compatibility issues with 3.2. 
- Better coverage and tests on Windows (thanks Frazer McLean) - Added enable() and disable() methods for loggers (thanks Frazer McLean) - Many cleanups and overall project improvements (thanks Frazer McLean) Version 0.12.0 -------------- Released on November 24th 2015 - Added logbook.utils.deprecated to automatically emit warnings when certain functions are called (Thanks Ayala Shachar) - Added logbook.utils.suppressed_deprecations context to temporarily suppress deprecations (Thanks Ayala Shachar) - Added logbook.utils.logged_if_slow to emit logs when certain operations exceed a time threshold (Thanks Ayala Shachar) - Many PEP8 fixes and code cleanups (thanks Taranjeet Singh and Frazer McLean) - TestHandler constructor now receives an optional `force_heavy_init=True`, forcing all records to heavy-initialize Version 0.11.3 -------------- Released on November 5th 2015 - Windows-specific fixes and CI configuration (Thanks Frazer McLean) - Several Unicode-specific fixes (Thanks Frazer McLean) - Documentation cleanups Version 0.11.2 -------------- Released on September 29th 2015 - Fix importing issue with SQLAlchemy ticketing handler Version 0.11.0 -------------- Released on September 29th 2015 - Added TRACE log level for enabling logs below DEBUG - Improved SQLAlchemy session management (thanks @fintan) - Removed the ``bubble`` argument from NullHandler, preventing many future confusions Version 0.10.1 -------------- Released on August 4th 2015 - Small bugfix supporting exc_info=False without breaking formatting Version 0.10.0 -------------- Released on July 14th 2015 - Removed the global handler which was installed by default so far. The rationale is to ease adoption for 3rd party libraries who don't want output to go to stderr by default. 
The old behavior can still be restored by setting the LOGBOOK_INSTALL_DEFAULT_HANDLER environment variable - Fix small Python 3 compatibility issues Version 0.9.1 ------------- Released on May 4th 2015 - Minor fixes Version 0.9.0 ------------- Released on February 12th 2015 - Added frame correction option, to properly mark log record origins (Thanks Roey Darwish) - Fixed MailHandler infinite recursion bug (Thanks Tal Yalon) Version 0.8.1 ------------- Released on November 30th 2014 - Fix support for gevent, along with various other fixes (Thanks Tal Yalon) Version 0.8.0 ------------- Released on November 11th 2014. Codename "improve_some_more" - Added Pushover handler (thanks @owo) - Default logging format now includes seconds and microseconds (#110) - Several fixes regarding possible deadlocks in log formatting (thanks Guy Rozendorn) - Fixed packaging so that the speedups module now gets packaged by default (#122) - ThreadedWrapperHandler now supports maxsize (#121) (thanks Ken Jung) - Fixes in rotating handlers (thanks zhangliyong) - Added Pushover handler (thanks Ossama W. Obeid) - RedisHandler now supports lpush as an option (thanks Bruno Rocha) Version 0.7.0 ------------- Released on May 12th 2014. Codename "not_just_yet" - Restored Python 3.2 support (thanks @rnortman) - NullHandlers now respect filters - allows to only drop/mute certain records (#73) - redirect_logging now sets the legacy root logger's level to DEBUG by default. This can be changed by specifying `set_root_logger_level=False` (#96) - Bugfixes Version 0.6.0 ------------- Released on October 3rd 2013. Codename "why_not_production_ready" - Added Redis handler (Thanks a lot @guillermo-carrasco for this PR) - Fixed email encoding bug (Thanks Raphaël Vinot) Version 0.5.0 ------------- Released on August 10th 2013. - Drop 2.5, 3.2 support, code cleanup - The exc_info argument now accepts `True`, like in the standard logging module Version 0.4.2 ------------- Released on June 2nd 2013. 
- Fixed Python 3.x compatibility, including speedups - Dropped Python 2.4 support. Python 2.4 support caused a lot of hacks in the code and introduced duplication to the test code. In addition, it is impossible to cover 2.4-3.x with a single tox installation, which may introduce unwitting code breakage. Travis also does not support Python 2.4 so the chances of accidentally breaking this support were very high as it was... Version 0.4.1 ------------- Released on December 12th. Codename "121212" - Fixed several outstanding encoding problems, thanks to @dvarazzo. - Merged in minor pull requests (see https://github.com/mitsuhiko/logbook/pulls?&state=closed) Version 0.4 ----------- Released on October 24th. Codename "Phoenix" - Added preliminary RabbitMQ and CouchDB support. - Added :class:`logbook.notifiers.NotifoHandler` - `channel` is now documented to be used for filtering purposes if wanted. Previously this was an opaque string that was not intended for filtering of any kind. Version 0.3 ----------- Released on October 23rd. Codename "Informant" - Added :class:`logbook.more.ColorizingStreamHandlerMixin` and :class:`logbook.more.ColorizedStderrHandler` - Deprecated :class:`logbook.RotatingFileHandlerBase` because the interface was not flexible enough. - Provided basic Python 3 compatibility. This did cause a few smaller API changes that caused minimal changes on Python 2 as well. The deprecation of the :class:`logbook.RotatingFileHandlerBase` was a result of this. - Added support for Python 2.4 - Added batch emitting support for handlers which now makes it possible to use the :class:`logbook.more.FingersCrossedHandler` with the :class:`logbook.MailHandler`. - Moved the :class:`~logbook.FingersCrossedHandler` handler into the base package. The old location stays importable for a few releases. - Added :class:`logbook.GroupHandler` that buffers records until the handler is popped. 
- Added :class:`logbook.more.ExternalApplicationHandler` that executes an external application for each log record emitted. Version 0.2.1 ------------- Bugfix release, Released on September 22nd. - Fixes Python 2.5 compatibility. Version 0.2 ----------- Released on September 21st. Codename "Walls of Text" - Implemented default with statement for handlers which is an alias for `threadbound`. - `applicationbound` and `threadbound` return the handler now. - Implemented channel recording on the log records. - The :class:`logbook.more.FingersCrossedHandler` now is set to `ERROR` by default and has the ability to create new loggers from a factory function. - Implemented maximum buffer size for the :class:`logbook.more.FingersCrossedHandler` as well as a lock for thread safety. - Added ability to filter for context. - Moved bubbling flags and filters to the handler object. - Moved context processors on their own stack. - Removed the `iter_context_handlers` function. - Renamed `NestedHandlerSetup` to :class:`~logbook.NestedSetup` because it can now also configure processors. - Added the :class:`logbook.Processor` class. - There is no difference between logger attached handlers and context specific handlers any more. - Added a function to redirect warnings to logbook (:func:`logbook.compat.redirected_warnings`). - Fixed and improved :class:`logbook.LoggerGroup`. - The :class:`logbook.TestHandler` now keeps the record open for further inspection. - The traceback is now removed from a log record when the record is closed. The formatted traceback is a cached property instead of a function. - Added ticketing handlers that send logs directly into a database. - Added MongoDB backend for ticketing handlers - Added a :func:`logbook.base.dispatch_record` function to dispatch records to handlers independently of a logger (uses the default record dispatching logic). - Renamed `logger_name` to `channel`. 
- Added a multi processing log handler (:class:`logbook.more.MultiProcessingHandler`). - Added a twitter handler. - Added a ZeroMQ handler. - Added a Growl handler. - Added a Libnotify handler. - Added a monitoring file handler. - Added a handler wrapper that moves the actual handling into a background thread. - The mail handler can now be configured to deliver each log record not more than n times in m seconds. - Added support for Python 2.5 - Added a :class:`logbook.queues.SubscriberGroup` to deal with multiple subscribers. - Added a :class:`logbook.compat.LoggingHandler` for redirecting logbook log calls to the standard library's :mod:`logging` module. Version 0.1 ----------- First public release. logbook-1.5.3/LICENSE000066400000000000000000000030241355165376200141750ustar00rootroot00000000000000Copyright (c) 2010 by the Logbook Team, see AUTHORS for more details. Some rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * The names of the contributors may not be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. logbook-1.5.3/MANIFEST.in000066400000000000000000000002271355165376200147300ustar00rootroot00000000000000include MANIFEST.in Makefile CHANGES logbook/_speedups.c logbook/_speedups.pyx tox.ini LICENSE include scripts/test_setup.py recursive-include tests * logbook-1.5.3/Makefile000066400000000000000000000013271355165376200146340ustar00rootroot00000000000000all: clean-pyc test clean-pyc: find . -name '*.pyc' -exec rm -f {} + find . -name '*.pyo' -exec rm -f {} + find . -name '*~' -exec rm -f {} + test_setup: @python scripts/test_setup.py test: @py.test -r s tests toxtest: @tox vagrant_toxtest: @vagrant up @vagrant ssh --command "rsync -avP --delete --exclude=_build --exclude=.tox /vagrant/ ~/src/ && cd ~/src/ && tox" bench: @python benchmark/run.py docs: make -C docs html SPHINXOPTS=-Aonline=1 release: logbook/_speedups.so python scripts/make-release.py logbook/_speedups.so: logbook/_speedups.pyx cython logbook/_speedups.pyx python setup.py build_ext --inplace cybuild: logbook/_speedups.so .PHONY: test upload-docs clean-pyc cybuild bench all docs logbook-1.5.3/README.md000066400000000000000000000024231355165376200144510ustar00rootroot00000000000000# Welcome to Logbook | | | |--------------------|-----------------------------| | Travis | [![Build Status][ti]][tl] | | AppVeyor | [![Build Status][ai]][al] | | Supported Versions | ![Supported Versions][vi] | | Latest Version | [![Latest Version][pi]][pl] | | Test Coverage | [![Test Coverage][ci]][cl] | Logbook is a 
nice logging replacement. It should be easy to setup, use and configure and support web applications :) For more information: http://logbook.readthedocs.org [ti]: https://secure.travis-ci.org/getlogbook/logbook.svg?branch=master [tl]: https://travis-ci.org/getlogbook/logbook [ai]: https://ci.appveyor.com/api/projects/status/quu99exa26e06npp?svg=true [vi]: https://img.shields.io/badge/python-2.7%2C3.5%2C3.6%2C3.7-green.svg [di]: https://img.shields.io/pypi/dm/logbook.svg [al]: https://ci.appveyor.com/project/vmalloc/logbook [pi]: https://img.shields.io/pypi/v/logbook.svg [pl]: https://pypi.org/pypi/Logbook [ci]: https://coveralls.io/repos/getlogbook/logbook/badge.svg?branch=master&service=github [cl]: https://coveralls.io/github/getlogbook/logbook?branch=master logbook-1.5.3/Vagrantfile000066400000000000000000000025021355165376200153550ustar00rootroot00000000000000# -*- mode: ruby -*- # vi: set ft=ruby : PYTHON_VERSIONS = ["python2.6", "python2.7", "python3.3"] Vagrant::Config.run do |config| config.vm.define :box do |config| config.vm.box = "precise64" config.vm.box_url = "http://files.vagrantup.com/precise64.box" config.vm.host_name = "box" config.vm.provision :shell, :inline => "sudo apt-get -y update" config.vm.provision :shell, :inline => "sudo apt-get install -y python-software-properties" config.vm.provision :shell, :inline => "sudo add-apt-repository -y ppa:fkrull/deadsnakes" config.vm.provision :shell, :inline => "sudo apt-get update" PYTHON_VERSIONS.each { |python_version| config.vm.provision :shell, :inline => "sudo apt-get install -y " + python_version + " " + python_version + "-dev" } config.vm.provision :shell, :inline => "sudo apt-get install -y libzmq-dev wget libbluetooth-dev libsqlite3-dev" config.vm.provision :shell, :inline => "wget http://python-distribute.org/distribute_setup.py -O /tmp/distribute_setup.py" PYTHON_VERSIONS.each { |python_executable| config.vm.provision :shell, :inline => python_executable + " /tmp/distribute_setup.py" } 
config.vm.provision :shell, :inline => "sudo easy_install tox==1.2" config.vm.provision :shell, :inline => "sudo easy_install virtualenv==1.6.4" end end logbook-1.5.3/appveyor.yml000066400000000000000000000033001355165376200155550ustar00rootroot00000000000000cache: - C:\Users\appveyor\AppData\Local\pip\Cache\wheels environment: global: # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the # /E:ON and /V:ON options are not enabled in the batch script intepreter # See: http://stackoverflow.com/a/13751649/163740 BUILD: "cmd /E:ON /V:ON /C .\\.appveyor\\build.cmd" PYPI_USERNAME: secure: ixvjwUN/HsSfGkU3OvtQ8Q== PYPI_PASSWORD: secure: KOr+oEHZJmo1el3bT+ivmQ== ENABLE_LOGBOOK_NTEVENTLOG_TESTS: "TRUE" matrix: - PYTHON: "C:\\Python27" - PYTHON: "C:\\Python27" CYBUILD: "TRUE" - PYTHON: "C:\\Python27-x64" - PYTHON: "C:\\Python27-x64" CYBUILD: "TRUE" - PYTHON: "C:\\Python35" - PYTHON: "C:\\Python35" CYBUILD: "TRUE" - PYTHON: "C:\\Python35-x64" - PYTHON: "C:\\Python35-x64" CYBUILD: "TRUE" - PYTHON: "C:\\Python36" - PYTHON: "C:\\Python36" CYBUILD: "TRUE" - PYTHON: "C:\\Python36-x64" - PYTHON: "C:\\Python36-x64" CYBUILD: "TRUE" - PYTHON: "C:\\Python37" - PYTHON: "C:\\Python37" CYBUILD: "TRUE" - PYTHON: "C:\\Python37-x64" - PYTHON: "C:\\Python37-x64" CYBUILD: "TRUE" init: - echo %PYTHON% - set PATH=%PYTHON%;%PYTHON%\Scripts;%PATH% install: - ".appveyor\\prepare.bat" build: off test_script: - py.test -r s tests after_test: - ".appveyor\\after_test.bat" artifacts: # Archive the generated packages in the ci.appveyor.com build report. 
- path: dist\*.whl deploy: description: '' provider: GitHub auth_token: secure: 0yLUo/V+wwSvSFk9nBW/77RN9iTjJA1B5p/TM1XgVLPPFEZWkH756jyJ0FOmtJPt artifact: /.*\.whl/ draft: true prerelease: false on: appveyor_repo_tag: true logbook-1.5.3/benchmark/000077500000000000000000000000001355165376200151235ustar00rootroot00000000000000logbook-1.5.3/benchmark/bench_disabled_introspection.py000066400000000000000000000005431355165376200233650ustar00rootroot00000000000000"""Tests with frame introspection disabled""" from logbook import Logger, NullHandler, Flags log = Logger('Test logger') class DummyHandler(NullHandler): blackhole = False def run(): with Flags(introspection=False): with DummyHandler() as handler: for x in xrange(500): log.warning('this is not handled') logbook-1.5.3/benchmark/bench_disabled_logger.py000066400000000000000000000003121355165376200217360ustar00rootroot00000000000000"""Tests with the whole logger disabled""" from logbook import Logger log = Logger('Test logger') log.disabled = True def run(): for x in xrange(500): log.warning('this is not handled') logbook-1.5.3/benchmark/bench_enabled_introspection.py000066400000000000000000000005471355165376200232140ustar00rootroot00000000000000"""Tests with stack frame introspection enabled""" from logbook import Logger, NullHandler, Flags log = Logger('Test logger') class DummyHandler(NullHandler): blackhole = False def run(): with Flags(introspection=True): with DummyHandler() as handler: for x in xrange(500): log.warning('this is not handled') logbook-1.5.3/benchmark/bench_file_handler.py000066400000000000000000000004541355165376200212530ustar00rootroot00000000000000"""Benchmarks the file handler""" from logbook import Logger, FileHandler from tempfile import NamedTemporaryFile log = Logger('Test logger') def run(): f = NamedTemporaryFile() with FileHandler(f.name) as handler: for x in xrange(500): log.warning('this is handled') 
logbook-1.5.3/benchmark/bench_file_handler_unicode.py000066400000000000000000000004771355165376200227660ustar00rootroot00000000000000"""Benchmarks the file handler with unicode""" from logbook import Logger, FileHandler from tempfile import NamedTemporaryFile log = Logger('Test logger') def run(): f = NamedTemporaryFile() with FileHandler(f.name) as handler: for x in xrange(500): log.warning(u'this is handled \x6f') logbook-1.5.3/benchmark/bench_logger_creation.py000066400000000000000000000001731355165376200220000ustar00rootroot00000000000000"""Test with no handler active""" from logbook import Logger def run(): for x in xrange(500): Logger('Test') logbook-1.5.3/benchmark/bench_logger_level_low.py000066400000000000000000000004631355165376200221660ustar00rootroot00000000000000"""Benchmarks too low logger levels""" from logbook import Logger, StreamHandler, ERROR from cStringIO import StringIO log = Logger('Test logger') log.level = ERROR def run(): out = StringIO() with StreamHandler(out): for x in xrange(500): log.warning('this is not handled') logbook-1.5.3/benchmark/bench_logging_file_handler.py000066400000000000000000000005131355165376200227550ustar00rootroot00000000000000"""Tests logging file handler in comparison""" from logging import getLogger, FileHandler from tempfile import NamedTemporaryFile log = getLogger('Testlogger') def run(): f = NamedTemporaryFile() handler = FileHandler(f.name) log.addHandler(handler) for x in xrange(500): log.warning('this is handled') logbook-1.5.3/benchmark/bench_logging_file_handler_unicode.py000066400000000000000000000005211355165376200244620ustar00rootroot00000000000000"""Tests logging file handler in comparison""" from logging import getLogger, FileHandler from tempfile import NamedTemporaryFile log = getLogger('Testlogger') def run(): f = NamedTemporaryFile() handler = FileHandler(f.name) log.addHandler(handler) for x in xrange(500): log.warning(u'this is handled \x6f') 
logbook-1.5.3/benchmark/bench_logging_logger_creation.py000066400000000000000000000003201355165376200235000ustar00rootroot00000000000000"""Test with no handler active""" from logging import getLogger root_logger = getLogger() def run(): for x in xrange(500): getLogger('Test') del root_logger.manager.loggerDict['Test'] logbook-1.5.3/benchmark/bench_logging_logger_level_low.py000066400000000000000000000005551355165376200236760ustar00rootroot00000000000000"""Tests with a logging handler becoming a noop for comparison""" from logging import getLogger, StreamHandler, ERROR from cStringIO import StringIO log = getLogger('Testlogger') log.setLevel(ERROR) def run(): out = StringIO() handler = StreamHandler(out) log.addHandler(handler) for x in xrange(500): log.warning('this is not handled') logbook-1.5.3/benchmark/bench_logging_noop.py000066400000000000000000000005651355165376200213230ustar00rootroot00000000000000"""Tests with a logging handler becoming a noop for comparison""" from logging import getLogger, StreamHandler, ERROR from cStringIO import StringIO log = getLogger('Testlogger') def run(): out = StringIO() handler = StreamHandler(out) handler.setLevel(ERROR) log.addHandler(handler) for x in xrange(500): log.warning('this is not handled') logbook-1.5.3/benchmark/bench_logging_noop_filter.py000066400000000000000000000007311355165376200226630ustar00rootroot00000000000000"""Tests with a filter disabling a handler for comparsion in logging""" from logging import getLogger, StreamHandler, Filter from cStringIO import StringIO log = getLogger('Testlogger') class DisableFilter(Filter): def filter(self, record): return False def run(): out = StringIO() handler = StreamHandler(out) handler.addFilter(DisableFilter()) log.addHandler(handler) for x in xrange(500): log.warning('this is not handled') logbook-1.5.3/benchmark/bench_logging_stream_handler.py000066400000000000000000000005141355165376200233320ustar00rootroot00000000000000"""Tests the stream handler in logging""" 
from logging import Logger, StreamHandler from cStringIO import StringIO log = Logger('Test logger') def run(): out = StringIO() log.addHandler(StreamHandler(out)) for x in xrange(500): log.warning('this is not handled') assert out.getvalue().count('\n') == 500 logbook-1.5.3/benchmark/bench_noop.py000066400000000000000000000006031355165376200176060ustar00rootroot00000000000000"""Test with no handler active""" from logbook import Logger, StreamHandler, NullHandler, ERROR from cStringIO import StringIO log = Logger('Test logger') def run(): out = StringIO() with NullHandler(): with StreamHandler(out, level=ERROR) as handler: for x in xrange(500): log.warning('this is not handled') assert not out.getvalue() logbook-1.5.3/benchmark/bench_noop_filter.py000066400000000000000000000005501355165376200211540ustar00rootroot00000000000000from logbook import Logger, StreamHandler, NullHandler from cStringIO import StringIO log = Logger('Test logger') def run(): out = StringIO() with NullHandler(): with StreamHandler(out, filter=lambda r, h: False) as handler: for x in xrange(500): log.warning('this is not handled') assert not out.getvalue() logbook-1.5.3/benchmark/bench_noop_filter_on_handler.py000066400000000000000000000007741355165376200233550ustar00rootroot00000000000000"""Like the filter test, but with the should_handle implemented""" from logbook import Logger, StreamHandler, NullHandler from cStringIO import StringIO log = Logger('Test logger') class CustomStreamHandler(StreamHandler): def should_handle(self, record): return False def run(): out = StringIO() with NullHandler(): with CustomStreamHandler(out) as handler: for x in xrange(500): log.warning('this is not handled') assert not out.getvalue() logbook-1.5.3/benchmark/bench_redirect_from_logging.py000066400000000000000000000006461355165376200231740ustar00rootroot00000000000000"""Tests redirects from logging to logbook""" from logging import getLogger from logbook import StreamHandler from logbook.compat import 
redirect_logging from cStringIO import StringIO redirect_logging() log = getLogger('Test logger') def run(): out = StringIO() with StreamHandler(out): for x in xrange(500): log.warning('this is not handled') assert out.getvalue().count('\n') == 500 logbook-1.5.3/benchmark/bench_redirect_to_logging.py000066400000000000000000000006431355165376200226500ustar00rootroot00000000000000"""Tests redirects from logging to logbook""" from logging import getLogger, StreamHandler from logbook.compat import LoggingHandler from cStringIO import StringIO log = getLogger('Test logger') def run(): out = StringIO() log.addHandler(StreamHandler(out)) with LoggingHandler(): for x in xrange(500): log.warning('this is not handled') assert out.getvalue().count('\n') == 500 logbook-1.5.3/benchmark/bench_stack_manipulation.py000066400000000000000000000007721355165376200225270ustar00rootroot00000000000000"""Tests basic stack manipulation performance""" from logbook import Handler, NullHandler, StreamHandler, FileHandler, \ ERROR, WARNING from tempfile import NamedTemporaryFile from cStringIO import StringIO def run(): f = NamedTemporaryFile() out = StringIO() with NullHandler(): with StreamHandler(out, level=WARNING): with FileHandler(f.name, level=ERROR): for x in xrange(100): list(Handler.stack_manager.iter_context_objects()) logbook-1.5.3/benchmark/bench_stream_handler.py000066400000000000000000000005121355165376200216220ustar00rootroot00000000000000"""Tests the stream handler""" from logbook import Logger, StreamHandler from cStringIO import StringIO log = Logger('Test logger') def run(): out = StringIO() with StreamHandler(out) as handler: for x in xrange(500): log.warning('this is not handled') assert out.getvalue().count('\n') == 500 logbook-1.5.3/benchmark/bench_test_handler.py000066400000000000000000000003401355165376200213050ustar00rootroot00000000000000"""Tests the test handler""" from logbook import Logger, TestHandler log = Logger('Test logger') def run(): with TestHandler() as 
handler: for x in xrange(500): log.warning('this is not handled') logbook-1.5.3/benchmark/run.py000066400000000000000000000027361355165376200163110ustar00rootroot00000000000000#!/usr/bin/env python """ Runs the benchmarks """ from __future__ import print_function import sys import os import re from subprocess import Popen try: from pkg_resources import get_distribution version = get_distribution('Logbook').version except Exception: version = 'unknown version' _filename_re = re.compile(r'^bench_(.*?)\.py$') bench_directory = os.path.abspath(os.path.dirname(__file__)) def list_benchmarks(): result = [] for name in os.listdir(bench_directory): match = _filename_re.match(name) if match is not None: result.append(match.group(1)) result.sort(key=lambda x: (x.startswith('logging_'), x.lower())) return result def run_bench(name, use_gevent=False): sys.stdout.write('%-32s' % name) sys.stdout.flush() Popen([sys.executable, '-mtimeit', '-s', 'from bench_%s import run' % name, 'from logbook.concurrency import enable_gevent', 'enable_gevent()' if use_gevent else '', 'run()']).wait() def bench_wrapper(use_gevent=False): print('=' * 80) print('Running benchmark with Logbook %s (gevent enabled=%s)' % (version, use_gevent)) print('-' * 80) os.chdir(bench_directory) for bench in list_benchmarks(): run_bench(bench, use_gevent) print('-' * 80) def main(): bench_wrapper(False) try: import gevent bench_wrapper(True) except ImportError: pass if __name__ == '__main__': main() logbook-1.5.3/docs/000077500000000000000000000000001355165376200141215ustar00rootroot00000000000000logbook-1.5.3/docs/Makefile000066400000000000000000000107621355165376200155670ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # Internal variables. 
PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " text to make text files" @echo " man to make manual pages" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." 
htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Logbook.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Logbook.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/Logbook" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Logbook" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." make -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." 
linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." logbook-1.5.3/docs/_static/000077500000000000000000000000001355165376200155475ustar00rootroot00000000000000logbook-1.5.3/docs/_static/logbook-logo.png000066400000000000000000007636221355165376200206670ustar00rootroot00000000000000PNG  IHDR;J pHYs.#.#x?v IDATxw_u7L2IHB轃F4`]`u]}tuugbY]Wu-kYQ,QAi!3If~|IHIz]׹2'jzn1II$_&3Igs JOI"?ZIU5 IIΞC'_%9)6.I%,ړ]ŰTorٿ`yǵ*ɜ @ծˁ;od4#] w\3=8@wXu\UPz^ 'Z.)rggw|./O2[b$HүjMUPgg$QpI't/O'w\/NNy$P/R^P I6܀$c;Snܙ$z@Xx=NJ 襚'%9}&:+2Nrnzde\w.?zU]9";r I$[\R@Iyss(SMܙdC3izzHr\9GC>8kuJL_J\kVg`?=XTCΘRT2$Uf{#\E~ڤyE{*|7z42vry9%rfU&v Ʉ@_WXfɺ!9G-S[I.G3 k=ӐG)SiI$ףMUPIVDd%L:z\:;:ڊ(T<7ɒ$ .lNvwɖ[s̋榡dQ!޶NCj1ړfeI&tHgGghf h~MYǻb_<3ɠ$7ԣw s-ЌfA7&-[霒dr&GCqC?dqQַmێМz1Ez:J$[#EZ|S}%r+ӹ,ɵIԫ!w!ϒLG3o.yribl3sR0M$3 ɣEڗ;q)ilK<-z4z72@)ɲ" Z|3#4SjHInD PSu]8èϐ0>yoE #2@Mq'w& %SRoUϤ.) 
j'45{j>W&jz5z&2@Mٹdυ?;:кO90OC P&ɺ" yry~9S'^{/.I$Y5)UұRhטwjf{p.<ˋ?$M 9/Oȑϛ]J=xNx2 ܻ\N$iHNzQ9GcWMPo귛74j%Bt=2;\Z'{ST0 sN.3w斦z<#`>O sy&7>~iRv/L$uII N.6:i1^3ie'%RD u>èsɇgA%I>zRVOHh/ߐ"Ja璏ψ$G#{.쪐:i"JAZsɇGգݿ%9C wygq)U$_O2F@!\U!'Tzry~͹=g根n5>ג*.\^[}0)=`>5Gnte]UhJuY~gw0>nTO Eڴ{\ޡ0U}*ɱe7%\ u; g=Ԁo$)N@u*r 3kdm$r2.쪰~{2%YeH2.jUQZ nɳ鬌Yj$p`W[SPn';+QfO%Tf+drw?C-9guĀZLIe!\]!{oIǶ"JnШ֜oI2@ vWMI219ǔU9'^%\]!k1wyg.YU$/-8P_evry˦E73J߇z@>+qX斦2`h2ʏI2 %\]Ga:z`dASPʊ+PFa~VX.'ģʉ> I=IS.ϟ\!gL)1`z(2;ٳ4$g ZFH2@+rgaӯ9\yLG'yU5W=@7TcOIǎ#,)_&xI&iM2 I=12-$9Վ'\سUckGM\$G&޾Z3'13&RkgM5qL.9in=fVk΢UԾ^OchK|3I1<ԉp`ߴ%䰢 _!Ŭ}.6ڔ7.*#) ܚfO <2C[΅{7$&bl ]!\w$Ȃݱ43ΚCZ,[m կ)2ZO1GLN=,[uEKYIF&9&nM+nij{Uc[g/XY=1ZrW!oj+u\߯9'ϙ;4lȂ%+$˓^ +$WYp޵1^`or-:r٩2h@Ov8 󏞑SN}mz=+R [kzIUtћ?{[<貕>yha2o|˞qL=ڵ&\/k,cݷ"KX󎝕 DccCN=%s&Mw/-oR I~Qf*'-I-cwos# 6Z[K rK̛:VNm7t[e2Mv;I2~_^>2۴˦` ̯|[7oO>rs:;;.]WE[Y$KO_?2 e6.J袏іۿ~Oe목_qn'wvG9oO2pH1ɹE~wK21C7-.iMc U#OEeoMhj'[qrk5/ ڙ7e= /]|-ƓYXV%i(8(2@lK򪔰ck|7fE.]_Y??be2 @WvnMi6k/|im).LϸaCC OчL(( ]!\(F91Z&׽QFRl\YAA[ UƆK槵_ѥ/M2eb#I}i~ϿN9ě ˣ,V=L=,yEmJE.Ir-_CY UQCzV$=8{re_p*m=F Ƚ7VXicGf`qIYnT@ 3ɫFjqW7deڭoQzJ$ЯrK'.$IrOٍ?*|yeeJlkߖE,)Vjo*pnA P+<;IUr[;yO䣷/MŭM'*\@ @zuO:v䗟]fW\^p".T$/WEy,_{nmY?krZ5Vj,n+}E%\&ySm\)}Os.۶tV/̦5 w UKW;'&h7Գ߸'W,u zE:}޴BU7QduIk"tp{xW=a+ g? 
W6לc3vUi|滋,y_gp$ ٰ3|7t`>ܛVb\{~S{%Pd1 2@qtiU4^׶!?x׍n̺V-ZhK6S߻]p۪xʟ_lmǷܔ13k˜BkVӬ\Ȓ.ЍCڟ$,術헟=_sSN35g>=\ԖavشWk?[ݔϒl+A PIIM2|?l:(47 |4[/[W^ TbUyͿ}#,[]vOFP2@|=ܢ _pyYG744䰩S楡!w/j˶zJW{B>␺wí$h?i.!ɧ_FN>2J!saZp۪~@fL_uIZ+xtv&_uZ~%+nPcc$(C}q uEGGg~vOoϯ]7{|ʋ3rmyϗk~uwZ~9˒e3I PS8Itxe9搉E/ W~v{?f-{|Nmgmy}??cAZO?`^+гJ$(MZFҾu[n|Ϣٰ)I2|pkN:tJ6.Ml,6>rzRG @/"\躱InJrpgN~ 9EXvCkrz,+5W=@7(ɷSRܯ)g,UrǾKWիS ի!ԋp5G2s)Ε:XSM襺,!ɿ%'̚utYKWF=$"ez1'_'F ?46UNW\ofڍjI>XfP2"*ܔ]qAF n-EqKO|+k6lG$: 5I>Uf̙<}m ?yMojGI^z4@ ^e6xi٢O}/?llRvk\'h݅p`\lpqysO/Epۃ/>ljZvmIKrk=@w"\xzW%D Fkj*Mwⶼ+X^$֣t7Uͽ.%MϘal-\2nc]v,ߕÄ{$/;_Zv:/\mz56uVn[Uv_NrIhݝp`WoJoe79}޴LCCٝz M&=>%Izz~It };np8̲s|(uooۗc:lMe;䌔)}8/M~xhI=} &G3)з5'dF۟N̞CjF]&yimW(='{s~qCh$2F _sy"@qH2=/aW]CV=JgM=Z5ɇz"_D@ﰧyn`Hk|K2uGxlE I>^FSz$OM2ʡػ4s&zmy^[lә$- e%9.+}6t`럓ٓT=Jљ<ܶ6ے,z 2t/If$o==`8u_g_5H>!ɰJ'C'pi$X@-MGǒ'3&z^/ISodYv] F !IZ||& , FoNڒܵsƎgJaػ$G'< Ltk!Eiߺ-/_Q(Ɋ8ߕ'`W I(_dF] 7*X.$mv*{>8BgP3;Os*nn| #WYԶ:/xӾu[գuk mVV9ݏe@g8jhʋ3U|;7 IN~lA[_l&}5SS۟|yc+fq\CZymU.{iCOpv oM҉' &&y~jO>9C?;'Z|Umiҝ^hv o:z_[J肗}tSQlؔkn1(΄ׅ;niv=|sj7.Л4&9+Iiz nmtЀ~[[ܜ؆2ڽmlȇqc[\ ysgX]oljZkl_;̏ 'yEWv>!J<6&Iߜ\jJr^j/2{1bpkcBϏwggnǿsS:긐wЁ^9֭g_u,$ldKj4vͿHVh쉓4Ӓj5QCi2gL3Y= #󯯽8Gk߾3y釾?.ͮaq@8 @OВ$YqH"Imc3oA9|ڸ̛:.F >G-6V׵&僯 CZ׵o_uYۯWn-$%y f.НNm˓xM3O!eQ|Gټ7x9y +?z-2c¨lܞ }붬^?K՘$~%^$?J^p^f`祶Kg̐9ljDaSAUU-۶#yzu}'+N}6YoϴoVX ;4G<~556fҘa3tPojOYz]_.KWUtUe!tӑ䷩%UJ'e\8*/I2Yn9i4{J>>SOcJ:_.厇ԵosScgϮk_ܷ`y`~9lA~[GG>2Ό rܩiitzggv,]y?;B763kLrI'qjaI+4D @\INxjhHN9(ΝSLaSJcc w;̻tCVT׾CZW<;t_*Ņ;v475[u䏋r3s̜0z!:p@F-vn{)Wˊ }lfP _IH87WTmhjߔdZ!9eԜipJo3;Cc[˱3&f܈//_Gv^W9xZ23ɯS 5$9-:h=L>a nmzn3_oߔ\qyy'$ŏn{ ow wŹ'd̰]1yuW[f5SC}Xo68n 'v.P$OIx44$G<1g>='ϙǍ{t7.{!?;'6o{ZF r$Vr!3e&;05q#sWttvm?.YKV%+5LgOrkMkS dmsTΗmT$yC R#9Ggd̰AU/(k6}Ir1gau͞}_oԹSsƼa܈!9fƄ s~`-y',ςVispYړܐo'yqO @QfvJϒes9̳Y=-o7YINW6foEgic?iԘySzo#ljߚKWV)x^dEY:R"u&1|-IT @7Аd~jϯxR̚8ON^dUuݯ)3`7_}My󥧥_syGfNЁJQ-۲:,YVCEI\8{Ā$/Ig)FcfcffUtttxk>q77bH>sؔޛwCVk¨!Ib\3{̙4 _SfNF-۶e㢶ܽhi-,nK7-I.~L$_HN8_EeD7%y}1R)c)P>d_. 
WSޗzf^/>· Uƾ2lЀ4{J[`[GGz|e^4wo}dY6ozfAjk3ފg(p}1&oHk O9OcO~we͆j~sk?ל,^/GڊuQZ¾ԘcgL1}{MNGGg݋ڲas{գNmmWੌO$M2Y 1K?nV.>iN2N\ ,Ji|99}޴J.~g]ؘ7?k*w-2~TwU?9N$!a{/"YGnk?kڪU:7! (A@6sq>'~'빟Ōz|~?[2 zi\4;fr"FtmE|L8 Ƨk6w;NYxޒ^QBmCȐGvH~شs/˾6tni:V xp沈 p XjǤ(I3A)Yˌe߫NBLtF \3jظQ"aCG'6K+X~ Knu[b:0`\t05EDDD=pp`sjǤ~کOtgeb#n:)4$TTUlYYV#Ŗ:7Xv`@渭tȟ~-pJ"<8wm{rU< ٲXLI0HKcHu;KXj# Wnd ,.5Qk~mM`gsYDDDs#Td8qr[ T~٦:`W3uEvyᬑj5 WmgڲV –\+lNvj( kd ,Xlt,' [{Hh*6e$'0mPG+mWV=3/ev侜9.$4 }yݖv*O<[ Ey*/K•rn:SJ,"tz&"""Ta8qkYC2[+ݡy{CїٶnӸQա} [}My˖u.wn.:ƩDXF•ٺO 5ED:\iXh*5x\0cKڅ-sцUVpKsyuWFsLБ' ֣* Ql}179,^~hf:LDJMct6}ekr6\\<Xf44Xz&"""0M91ќܧ=g oQ\b,C8n9k8BwxSRfZNXv3I $ǘ\.(I 'G`Y9ֱxw|~dz #h"ipDDDD$MNMԷqq"Ξ2~i.hY#I(iOIo|ؖ2ŘFq \9!+3g:>[+䡀/7MAh"""")A}g-r8shvj۲Llj8~?+9%rD\9? f[aЯ׊r[\7eP؍Ѻ/jȜk`7F+x魆H a5p%V<-.9kh:4ol:NZu7?>d4GAN3M2Yy-k~$Ɔ]1&t_9ֱe>ӑR h0Q",x  民Ô8m`g&*<:{1)U^,#`"ѵƜelYgú^mhVORe'x6c~4EDŒ""""\(,g ^DŽesذ}iIoo4uόy<[߻ax'DFJC:4CNPG_~러_ZɆ"<* """, ;ªQYR3q&ha:`׾R$\.h,Ͳrb,[sMli<2֘#"Fesr80]Zq4Gj|շ9lYl: vIH"!&+%{Z%TV{DmH權..fd֌ښ[v¼强h%Q޷ $"!#,>Ĉ40q5@,Grӣ A4qƎ\f/Ym: S-lf:_%>[k F$+%,J`XV%ř!VT'_eݲtc % "bv.8۲лA^}pɉ|nD{KxO(4=7=ߝ9"a,/mk. 
K -10[Xc7[L:7'` 3qDIj."v,?)!Cr$ǘ#b=w!k: em:9;w.WnZNs,?傁E ϒuxjXGx5_$,"""b߁O3wI5Gq^҇[tgg [A(@$ǐCqi Ϗ Nv.r[A..e(84H NxOqiyXe92㼱|m7}3^d%^X& g RsYDDD%7W!٘m>ew;KVغg8jǍ Y}qE곫 C±3rg3sͅ+|08xp 5EDDD82' e&7t95wrMǵS2{[sriEmk9IS9+_yz mb-"'Fec3tҥe~ur_ s#Ҿ |}!];&'nqDnzmkUU#s\$gMS~1%k7W>b7ߛTo@hR+"PsYDDDN6䧴 }W!ŏo,boI8@!qiCu!B mYXVB(kAT,_˽mMG5XLV""6QsYDDD&0[9_Lq}ٵ5!9ϗ?f=Eczqְ.Du`8ʾnw5B%EœC;0//__RӱӁ7 g,"""rdcA~LfJɽ Hŷ|Q+lo &7]<6-)ۣ-)U^cdY{p9Aj.Pw` ?&9>FvAۺPi^}e>6Nzr<ן2]ZklUY]mZQa8^"QA<\<7cpFӑ@* ")DDDDp5 g98O4g φu#1c:Fqi9g>jt p);r$v|1Ν0≊"#Yeq^FL$k~i.q/ mE䘨,"""0r$n7 (Q=HK wDe<'VSu 56E3mrnFf1.ކs߬rfۺ `+"r\H8t3K+OqC,]ͽ|̦Ŧԉp^>sXΤޝTbq&naզ&\I`D}3sD$\H\2!0/k kAQ=3>o7 {㊓]읹>(-KH)jO>g^9F?'lPe*5EDD$:r$) \>/'l[CԆ{y󵦣]^&:-rLGQv6ig~nFbHq[gJvfLA#2D‚"""I2r$1Q|x7э8O8r{JxOx~rܛ2O0?+ig}hr ĐUMcy%b @<5EDD$X/?h˯&qj(rU^R}{1%啦Ա\.NP/!9>t&vFFGGs99>ƍLQ(7WO@͹wضw[L沈4t݁Il;e EͳMG7k>g:Al Mn("d׾2-e(EltxQ&bH&go<۞yw1wn&QsYDDDTVR $?7NMʉյ !՛̹ܸtd$p Omܵ?q1!a'9>7W}şCE}q{ӅE訹,""" iwr<ќ7;gJLގo/̇_|c:A܁.݃8"m&.؄(!Zprޅnf.GNO3"""Ґ 2pj?]6/ aWʿXK 36S<+ddž5[ˍB!/S%NG೵,m=N沈4frX[r䁴 ,??t'sA *j]`|}c1 \`iҒxI\ ?Pdaqj.HAvNNnz WMNj j//}]Bj|Gu!]DMMEU5mƍl[+c<4jd:m7:M3ss*x8 ۋD""""?W]2$zŘ63(5CMǬ_[غ{8,Sw䢱HM3Gvk緧/KL_|队] Mہi\W[RT3π:UPD~"""tC\[ח$5COWi^q~`H\qr?Uu[l[+-%kֲ4h[pٶױw*("G沈Tn^azl]Du-ȡq#L9Y\2}>_P_Le1! jS1(RcwyOx󵦣јmlB_]9}e<7wmEG)-(ќeӽu.L__0erYDD沈8m$(tnэF ֣H xE|7Q\ܟ"Jk$(˶NDFr] !NܙmE+Yrn`K0$"""NI.5P}5S,(TV{_Z#=n Ĺ#FȱxmJo뚡0Y)c Gdlܾ7X%$>4Dl4j.[\`S05EDDظs!sYfJ7N-LGԽU.[}ѺiPDWŷۺfۢƩtoTe hOnz v{h 03JDDDQ?z p:Sqia:O;heqYùl|? 
]ʸרںnׂ&eٺOiJj,8rrC0 """"rtNBijY3ѭ@,5] ؾc 'KfH5D쟯|DqikZ.ٺOiHeC&i-Gޙ=","""?.L3e8wDw~1h݄P-?|$&7 FdX,uD5+-oLJ|IAN:]Z6QcYĐ8z\{m"%LDDDd tZyYۣ-ne:,;5 ?~>]s|goōӆr9zHH[ReLIyZI};牶u#2,bXӌ`4VHe9T,g*AjDGqޜ9 nKM1IO/Υ{t[;ѽW\!pSٺgkjGjbmYjy"!"qjl"沈xd:H.-Fb:8傩:1?ū; rѳMSѝT12ˀ|=T'ibն)3>E$@oEDD .F`q(7˙C`Yzv+ݥkټχcloNr|x" 8l_{jM}Rb>gnћd5yM]NmiHsYDDD2GMվY8{$-LGҒ9u`G1De7Xn$=$z#ڭ#DBѦXq05EDD"q t̾hL/ٝ('"r~1(Ȯm_9itiKtDB YcL=`,.\L6:Aj󧟍mLQDDfKVeIIdN-rhE<2|`-"p* \OnQ U+]/~3նiPv[摛E=R6cn$H$RsYDD$r I!1CDD`b,*AYtָ-!z>8{ЛxNT:UL$,""pEn 0ø}s$yLGϾ'_].'>ƾű(z#+%Ѷ5E^~?<~'>d1H沈HԊ}=MHOӇ3(t9J^+ :4uFVDz#&ZsEB_c;] D ꊈ4,.>BDpéII5EDDқ~W7yV#۴ͳh4 M ]~?xϜ."Be/` ) 8m#6EDD7 1ќܧ= XO4}呑SE~یoRK.qH$QsYDDaH`\3ANFzr(""r >]*ޠ՘лDw IDAT6yx6`?9]z/p*"DeneBBO^5YD$|n W>8Ӯ-Nh EQ~c4kH^ӥw D5EDDW6CнU.HSLGcŷsPVY *jqBkx.$#|~?=>3p.`"He4@c nq9gxw,;hڸҊʠՈbbOuIz2=Z71"ǧe¯.@cs D*5EDD‹-p !0iF 3͍ED8|i>ek,[.w %!mYtjMAND$|>?<5?Ytmp4 CQj.,)1߫ND|̉$""[y'7Ҋӣ YY*|lkKO>ǷJD:5EDD`i t876ژ"""u[_Wrtnq/]Qn7[fӢ'"ΩoΒ5NLViQsYDD$Yo?c0:ᶟ&'M\AEUuPbqDׂ\cJDek.íhH"""+xm:ehL/e-""ūw|A4#qq,yRCF &"rão8]z: j.3@ MҒQu[g>^0u%qJ"G1iF ] "ᤲuG_w7Fa8^XDWnbnYۣ-9uq:HID$\ xO^+ʹzELtZ咛d"bj}5k Xta!5EDDBG:_` 1n6z5EDDN@e;_۲8ғͳR2Ok"r+WYjMNSsYDD$4L队m"7=t9;K#o7[^rԷQK{\Su8H8*ՎwW'59jj.e=^4'SDDBϊorï}oIk\.&@܌|l˜4:gnepTZQbN@cyӅEǩ,""bN*d9hm?Mׂ& zuWTV{^r1w!xc9!CMLIz& J~+ qp`ӅE䧩,""bF/9 #ӆc:=3r,GMnetWHWV~tH`ӅE訹,"",ppm2H'_Ōޅ\&ȉSRo{Oαcc~#}NZ[4!1X&_qi92_ntOQ. SsYDD9G)om>DD$}i>w;Vst6tnsؿKK'-)3Y)=Nc沈3/-Lٰn\6Sif/Yzjjڊnh߬1c{7՛H1 u;⋈π&C$G2x[DDNŎt`T6tkr~T:gK" }\r nq|"<$`ӅE,""=d"rb6@DD~}788k$C:4CDDlP^Y=3¼֍D1m`'rӓp\͠0/(h s}/q㣎g箚m\ 'ƴo_[ mo901郺@NZ2[q4ߖ]ޗشor =\G,p?p ̕ pxlEx}>Gk%q4J%1.-85 "M;ޗز3^N,""r/=LH33K+1DDOͲowvNZ2HJB,gQ" w;rѽ/ug L*.,";3 x0mL8o,y)&c <7ogͧZd3[Z7ɠ(11Qgg|m;m{o,8$'"SsYDDk;'MБk'1""lXVk\.Ft)`BtoLr|D9{g?CeCeccc<|0Fukm2y˹׎DsLہ8니n%`׾RK8v"""Ǧ0(2u 8,ͳ!""'s߬;$-Ml#EYk6f{ӥN-7x 0:xJ";e`4{ygڴKޒrK? \" >4 qh~{Phk2dGq zcu].|B?e+7n3(.pKӅEj.Fx! 
rҹ7N5CDDlk (pL83;ZWDYa+M}e7~" """GV=1obz`b=zi(~b5[4N_Ӽ~|\v˔W:]Հ󧔊)UDD`*@' e|v"HlUR ؂?l jYBi廁QcY$",""r(voi_/[+EYw;QcY$b,""r@#Y`sGc2Q;fpG1?;D$t|w\YWV;]zj,D5EDDmMŘ^\4v4pA]R\;e 1A#"eʍ\,*&Fe沈'sوX|Z " T{r,,("e"^FIDGQQeF q|0tli">[}5*4A=&|t8m*@4z8gmj/TVWSyq,~_#\Xgvclgo|L4'lˌWضfr3oz{uˠHD="" 4/5bDVrpc?{SD!kUUVh"?v&;9/;ubf@N_*D9qãoP{ tq]Z"""|sAxr,"c@ye5%_FIEU>u eȁw,7~EYEr8-)*Ь1/Hj,D>_ YBQ]Ct˂DB`P`*@JB,>w 晊 ">WPZ^.~M+7cjv,CsfssoI!1;Iԟ-ywc8O4߇umYAJ'"l߷B =@-""d ,l*@L$ "*/f;UӔ,7k&-7^,"+%fmEU5x#DͩCNMr 7O}Tڇ"b"" \݀]{riC֍C""S^Y; *~NrF>+^é5nˍ͎<Ғ:F_o_ǧ7`v۲ѥLl`}?N5J!DYzK""" Teq):""*CĮY. Б?|g3&}m~'q'<iIBHHxeWw2@",},xh*@zrmbg$U 93>^ϼg:Ə񢳽D"~ED йEw^0d("R$rY ?8~\6kÁ&1=P&s]}ۍ -/[Οc:Oqs[EDӻ&i&M6L@[;DD 0JރrY9vYqߏ쯷HC?qk\. "=)(""?̇s q`""Ґ끿`'oO^&ʋ*K+j*QX  }>ox ȇ6kϖ竿z=HKScYDBS/o3暎q,\0j.HC ٩I}8e "R*+:˸ؙ _A,\mrw`~[vs"#%^wHH;W>2XT$¨,"" A60c*@6y4J3AD$dx}>SVYA;]5}B=\rYPڿ?p˲jv3^ou@/w|>Иv˲HRcYDBˣo/fM83DЋH p.\9?nKRHdayJ+Ė>h š;_8χcjw/[u_9l՛L׺jv4z_AX33)(5aR^Y,b=Qzzst>H)@QnN>M )U-:vò9@W𽚇}Nw+v"S3ZUwv=zU5;3X르*拻pQZ&3%tD9J~?<~G^TE".aH8r7/b(!/ƲD<Ϯ}e(.{hs_w߁9ԍ8pjkˁ5uЎLwX3}5MfvJx_l1nܺY9T~^ G&''3}t L"F?""F8/DxpZd(/"bť/

u95w7 88N!5Kr F[cL78E&Lfh4Ƒ:k} kاhW^zή[S`߾}83Ź,ZqYA̼G0ՙ?*-ԪUT+mFx|.uY۷|#eAւ Q QKyI 6 Ys-b-pF @t҉1.Rm? ԔKfz*sࢴm,lrcpA7pbuXkڄ:0Et}|Aȍ; ;??N[Je]!˂ f c^>7SW}!As1֢?R r@ @ihec &2#K6V1"7& #'I6;mt2LV^+FcDEĀ?& A9:wD2L=mvcMwr>91OT%kyRݵkߏSO=eAւ pbXw2\~p>k{1y0 -nڽ,D3{{},kL)4&F K1[Z. !|bkߑHJ)Q+㯵cYcqAcnѣiA&ϲ ĵ og8p'|=nc1d_v". S|Yr܆{]}X~XnȽQ3 @pA7ְ&٫qGm,a*j - \Ɣ 㰰((#v#*L6pB tLhсYYֳRGcPlJc>.¦h`8MFl39mo3}?}uOY^y\W`QN;\@8192rxTaYLle yCjF S5ⷙxG?(u݇?8p?ؚ9sYt&AaH @7bjyx湏ێ fAnNHP 2ʩ*b{uϰ\W^f;movzWQqFX mu­ IDATte_#i7("47}L]Ȫ P6߽ z BTR:d-N% C`a~'[ǹ/ZZjJ65͕MD7|S˯|uO>d߿w1RYWiH~˂ Bbg'qẆۊAn;60d nDԧN-\h_ȣ A"1 NXXlc@>71w hR&0z ]4L0cJ|HYl7܀@`n.MM7V BβY,~Yejr8>JAI&M$.uSN9ݻWzq. A2.:in9=c-Z],wz 'aiN\1b5}ERD& p)lؘk1] mA(pAh/l ie'x(BNe>\š~.E9m 6f", kV]+# `AjH(zJs fZ6noI׿S꺧vߏUC2AX r% >`$YOy)W۽B {UAլ>LIwNrj`T5n0؏;$A:akbőڽ)JHVR [q!ԍ GE7 k) X׉cYR ~B8ͭ<,.$; )zA X^b )ϿַoGqAnLJ1em+JU"M'bT0+s,,pqJk"V 5K!:FXdžcHOf9۽:jҝ lR-A.C\!E145HΧᵛv>7_gqnή\\a-oe Fs 01'8 ±#<;|B}2 * NH12@.҆Vyr1j1?Ys Xjw²:]WPk:/V%#t4,\H֢_lwkŮ.9cC\63@2S X-sɑew AJsOy=anJrGR 7_B{;P!eAa, 'c>'F9] YXi#=UaB4Θ+2͆tɝ잎 Қ S*S%ݴJs2OX: \@Mb.HXAgЮco?9-u45[ k0Nk}", 7[|;T;| m`}lc\f Mpy睇",!: 0A\9/}x/xUqd>҂ <H]h6@"6<σsٓX(MY{la's,,1& ;؎\g²xS1}֘o }Q:i93Gs5MGfaZ !j. kA^AX|EXg7\$q#ZivP(~ot(E eS:ְ#\\kM nߞm-E6# G16u0r0Ӧ wheW*pb[*7Hs>>,vM1Q[a`Yjwcxnx:|1mк@<ΚlLU$j|2{EW~W0>92O k>J_4ǖqxP $ ܩ0[%@lG8n{KرX mvvNJmke&5.lr0Ng=$6_axNȔFwpÝۇK]IOz>Zu\IvR Zx8k1W)>쥷<=p${ldPE( m´{w8(p4 s1%XrcAу XXc[4ӹ+yrb]*V{׎DtSa_eDNia~rBeaS.^'1YbBskҙ5B}h7yT_|1nRe@˂ ArU»ଓ+{-1 -tFvߧC* ḋ k> @RL] ;NXk<7AI ݛ$~w!B;6 ѐ |(ux*9lJaהt%;]].[<-v8'<"j,sӵ*Wo7?\Oq-`lt\a-H r +G0o'OZl .sR*qR#fnc> K;nL瘳s5U~ŽdEqõEo9vL1yT\WEU|Mzf9JaY$kqEX#NQO1J:qX?o² ڢ~ҩ3B#{ ˗\r ~JR8AX ". 
K&Fp 9Sjh8l'ot([֪ .H[뎢6嗄$.S-o[峬 h-ʲ0R0*v{VIR g Ka3ITPx~ !M)aYD KmBom$1Bĸ(>ǀkf<*ø{qpY꺗_~9.,,ڐAp(Gῼi=u˝s0̒XL_sAA\b=D8ܨ8l=9]J[M(,;x#VC€4&nZPEȎek PL'Y ZS5\6 ZQ]LYCk!gLuį+:ܔD@}w7dzl]ZoCfmP˂ CA%7pc JUzS>`- SPV-XqtQJ)K#hxKex(L;1u!$u_]Dk(vM5P-'fRˆMf2k"+䎫 R !t8y~98/K]ꫯ//{ kA^Ax0*δ/\}Ye/ r^nb֣ǣ(ɑeØ1a(#!sR_}?^8씺^w#\am, 0&/{>SW3O*{-M6.E(*0?GRҡ|"\I:0lG#T2IG;h㔉J%M(dC;mrEm( )M" dJatوAbL>4R,p 9)/g5LT|~8[R}󞇷m#uYvR 1>O+{|58^z{- ~3[0|sDv߈$2m։W`a0 -0 0n׮+I7[,q.jڢcb4x$@+X P^Xc *:2)l:[ E?b3C2$6ـU0YnRx0Mw^E/[Z>\a-, |9e/|4zݵ8ylKoY8Bo0ѵ2XUxHdV*a1ij9!]kܤ  R+c& DG3w%gpF,ЙvѠMkpF|st-JS5C3Ƃ]jmqZTY!*ksS5or(~y+^׽uNXJe]!˂ x8~cO-B@Ǒf U *M#Zi Mvls~8Tpn7) N 7 Kms#g?v ^5|%evN @bm(9 q(}O JiT+2MA?VůFJfj,jD! 毛Z l+7$կ~5nM),\amH p!\rA^t6gspY4ќ HjXM\{KZ CܹRr:NP4fk'-Aob+ BHz;,\yQ<=n"E,|Vrt9B5}~Fi/0;1 -DC:xxX^dJ_09QE}|lOunk^R`_q$1pgRV:8Ð>&f: CR#A7tâ7I xGGcvzyq6 ;v0kЫ9`lb=ƍ#ksc#r<.cV1{HD4&}ԪP&kڽA5z_皺\4fx C_sqڰx%LC6TKMG?ޠ\an«^R\ 2O E/y(.?~1^s^vb-pFGCytofixCAHȲwl >I.vqwР"7ѥ\_Zg^"QvO5DX=.P,2+瓓ZhSg\eX/D z((cs(dǘ4]kZ5lCea `Lɿ/vPlLg~5TKS`nJl[>ru]WꚫE˂ Av.JiwR\ye.1b$s 8KƐ0Uy>Ba ÉE"4}_I19:-{Y+]u+knp_̳L#&܆u/R\`X WSD0J'5A{-u %F1~ßGaS6[U3\?M^<6trdZc~R[͟_# g~g𲗽5ׂd. Av&/[(Y0>V^}%|&706̈́B^yላGFa8}ѩŇ5faIζ}1R&5֋/W>&Lx1WM(a<&9+8irPMXrnτo0ӨarԽJAhT1&^jNc0*˄U9ldhP8Tlk_,rSs?sx_\kE˂ AvVRN}Xniy6+qZk|E@$1M4L4I+򏈻QΓIw\UBa&anjE;Koca$3GX!b43Sq*iCIs UFurBA6>+X/ 4DXFEG=ʗvezMԅaEi Y,dǔwMW27_s,ַ/|ak,ZAYe/z$t-N{|KoYz3 ;ZioxLE'Se?%DT:HRr/~?cl;8ۨa*~ۗvoK-/F0mMjNbKEi J3w/*,+p7k)mF2AX R n.{pWc!*N^ 賑šSRZ^vÂ[J)Lal IDAT! SJkwe iq:ޘ=$ւs|n4wb.Oοk11 mv†mX<U]b3C1x ^s:W;nj/ _ro$ZkUW]UޢzѿDg F 7:Օ.} ,Xjw96}SޮH,΂^8$qU0ט@UeaRk"a\ v<ډ-nY<,1Db?Sܔ\/}?)uM5+(uݍ"2 ?!j=N"AeAp^'wRo~۶Hf "*z`ݎzQo=, -@ѕ_4x=,zW2T4c ¶a iM VUgh&KEj"OzΆ7\l9BY5&'d002:huRgrC׶JсJ"4xiTIvO=L+욮3ZZke]V:Fc$; )zAI\W/~9],7Ujoz1rxCCq!Bsj GY`29VzV>v/7v8tmk=Z #6pR{~6ď˜ Z4ޕ-H䎟Ef5vMdPi@:򎋛84D>n(0w^ אTL>*DqXgac<&_STD0F 78`;Y] 5&3B.^#G:Įw8op۽5+ nVrQ0N=&m<>kǜcYeaT=iv 1-Q,bW¤ nfs%.f ڎ]WV0Yn_OEkVU|ēR-u@AaH l|S\4qY[k}PdR0.~=[-(f_Y(4Cέhg'P? lXXjNJ1ypOr;RȇD(2kE㆏I3]L0EU07Y#ad,Xjw#Q`NU:kw.{7H.UcC 5yXu[ ? 
k~/qWVUvm K]lĹ,jqYa3s~EDz ǜZ[kM6e8u֚z?/gO]實sVr6 EGp;DU[¶#7.뵟ڣqJ-r 5LB^"î$,tV<~_ߘbQ[ÿ 7超v?7F5?#cWHȭa"gf:6݅Z`g*u݉ vm8K]wH E^ak^ŝ}=2kqhe7k)4ɩFbКۤn^֦sh616Iq9 ۋv6{$N$ti17uagxӇ2aH6)6_NٵqMQXa4 Kqq.evn(4lR`7s1!.&YV5ZZOR׭;q9町(eAV d/day>7]G||ni\Vk+Da<8kR2H)jٮ)Ip߻dHs]%X[vbRh2ᘈ,R2^-(PCf f0yT TR H E^akAe.gqr\niȱU3`]$d ^GVء?X!63+qF? 뮵^1X͛q('{%9h󑋃5$o7MSb[?}h(잪c"²P>ݾƎr_V>9a,,s󨚰QcRL&z1S/4nȱVy)_OΑDRu~`A&ػ*:Þ¶CKQXbWA:ENgqP ZgԐ3LbnM`vSWCXU)u1{\;6L7DXFAX(zPds3)&p-H[$ j׶#ˆnS eFc^<33XaX AV˂ [Y8aq?z,~MǏ)s-ar|s#|Vj7o镊SܥlxحV\'%s`LC ۆn\S97YLrR7b4 cmL_#7*ƞd #!760HMqcA5A} 0g?A&&j>}};cTsss8p8RlH, E~A<'#N؅_}qdnim,s>#1 R E Q`2porjr<'bm%=@&²M9.3qyHq, 'pAQݮJ6+`18\ɰs&!O',+잮#[?A2AiǸ ae131c9֠; 5qoλ\%J)LT+m6C<>R8pN?R݌sY";J p~uqk1Ә(s-[}j69<+Y)'L&D'UsCHֆ၅U]: qП LQ>t,,C,r&!r[:"$ ܡLٱ+c}}, hL̉8[Xc ޓ*Na${ivRVkuZ[[Vzo1J# (|>2Lނc9Wh櫥{n߿rJnVQ\IvR /(ssN;wp &k+PIe_la"}dLc(҆lWwSMqW;.6XV"W", 'rO5b(z4 !9Y)>Noznm7|% #GxɚdHXjw0WJ<̔{N5Ht"t"WeZZkdZcll8<;~s~u;8߿'|rnfQ\I!¶DeA4E/8$|PZM" 1ku7ΩgYQhDW# qEYVq1K2 Q"DX N *@{>ݠ1uX n, iw%(L~U?o$rqX~@QI'0:hvpo;Ĵ\m6B7ےM.hWkYi6_B#(hOds>?xnv1spZSo=O(EeA|Q>ѧ1I{ ,g\ !*UGs5[$ւ2Tџ# 1悘]xVgB,Xjwwg=&4vfy?1ѭiM(^iQ>?5,H+Z330]TM cšz%C#|G5įCi1m͝4b&TkrBQϹk.:Lo3/uO88p'xbn*uGAASasA\sOJc 9#4N7 FUP^s.gYŝ#*ﰲl ETTpkѺE-A3?YøLk# l6aZIŁd6D,?`φsQM"m\Y@"d]ea$ ra~Fihl1ɹ5'1́topW? |"/|#@6UKtȏ#8A!,yGTW(+YX Zl?AA?$rȹc{>mYϹ|ܒy_]d8SKm?$q(%9~]11XA0CqkuY3Ec|L%نqW[uEk !a2}S=29FdȢ[r`s(麷L|ٯZkXc`[Ҕ &j7919f5ZsCM4̋yb l75Lpgfs㚼kc*֤΂p~R=SNa$,XƼ}]4Z9NW8u5 $:'/eÎfLhcZ"d6V7$׾]꺧z*ۇݻwVD˂ AFKϗVE2AX-ۧ  O+s^X &1'8#U$'mqx@y$xq8 m clfތQV0UaY{}\l(nA~+9lLj: 13!a״!#*nVatCethAC1e;KIc2t\V!v׻t^~e !rta|$/1   Ta4|\Oy,|4;=,;8A{\q^䈭@hDBvx3zJְhLc!-aJ_.\ Ggk6,,F cOg 1/.> (fTJ͗bx&B ߈?$jۡ(w]dvMw dMw}gcnnu:!jqY|>2}PJǑf;8x#ctyH>-oM5AvzBZŬ6`?H,1rʆ 'j!1V(mcD!"h/)E6 yB rk!CYp[,f6vX{V36cGo4TA3왖H[ܸ^|u>l۷333 EeArpg?xۋ.7QH?6]+cpKa3(wFˊ)לN$7<תM? lb6;XnwK:DMQ2 'zxD+rFy7}\}Lρ0?U Ctz#exR=); mF3ҵH8lP [ct]ȑf7q˷/u<1뮻0==]q. 
Zd7I<*~\O|4=Cc7ȱ܎-Ifdtf/8] QPK$-m|4, *f''VH6:'z'pjY YSPmLCHh6_Y8ď;ul"xMQmkQTmH7r zy8u~v9MbF|]ǮrN](  8?UG%FkC=wy'Fn'Ĺ,j B9h ye.zEg/aX9GI IDAT-0L9!39m( k/cXw̅v3ZنZH le`ᵰșK94͇qCHO5 j[eR NxfjmL,VCK†b&Q/<PLXy$a\772P^c}y3vM5Pr8 wS?{aysY"I p;,sk.: %| rG[0l`I6Gq L*qB!vJvjJPhM#akcš?v}AM郟i H)&)9yEbe`h'5D Buiw,gMguSVKNSE``Cg&VPJ:$a6g''0>&zFq&n^|K] Ox>VB5N$;q. l<e. /yLj1 (V݋#? dF1XgEl(ha,@;HtrUtb3MB:&²ɍš6zw7;:p7sJ3%hkb292?䊤uI.境C­ MN,upa9C KIS=xc.30{tҵx1ZUge\= \pnv q. ZD\AX~;\ _a1~?(ۂԛe\y!C\\gOi;vnIn&-tĉnȘZ++2u,jAc??KS8G%Cm>n`|LL`BPy-EW#W?~Z㏏3h%uRkVqS4í?UGc6B{u'nDng$sY"/q23Η^*Џk]N 7|n}_uh 6yLc)ʇ( rZMD˂ A6cg/LcZ`a[8 u} ̖h-.bF*,(ΧNĨx#N 19:UҒlAZ [nElBB3ajk.Vsr[hP06RȚ((oKMDXsS5BJg,,D6v01mXxL0,v_Vb!׿ۇ;/ST}ayeAV A֟k&f{^.j80aS.BebJJ:LK'V9cJ!iL,lyڽ>;fwb4F~xXhb\Fx6=mBh|( u", '9L_e[ sxƸM~1ó k,8Dz־Z`nR҅g<[EX *uՃŹ,;A֗g ճayiv|$ZrYo C!Nj-~3L5oδv?4̱FplIivzXXj0$rDscB2\ܔQ*:cFq/R=] mymut*w=ߣ 0wpXVq T+"bš&Z^f 1.K(nCβ=%Hq WAcalVfu$5]̬ŗcp;“JDdxMq D1¢V`FrZ-:}ڸ)ֆA69f6,ӆ#sӨ6$(t8dXEuK<svkV+0XXj d,fsxuq"i|AaU\|qk6z<618d 9Z{I0DaE?ϸG CޣR *% C;(MN~Ejl`ק0s`U!_ݝ?c Mֺ{ƺs2ưgy_Bn,{QCc7AX ^c|  5f?]Gu":;4լR5B/xsm SqL֤6w?V_]w7D'*=ka!?֥]@,x~1E_A2c7d"+<@Ax 'b:k"N'6r):40Ql|[i:2&~n5Le.)¹-]ʱ>]΁Hq]e[{/3=3@ a [!@&aulJ  Id& nȋȾ("."(Ƭ]{s?yyέUu랚I?s=5ڙ_CEo0O+/zыDXsY" |{`Y [-/ynw%w| yr(Sjs??.L`Í3\V>8m`Z[$FA_LX-DmzVU\1 I.l -މq>mDsU+,iMrutg@xMs-x-{A׵pb|,?R:DpىϾP˴lψ?w~aQ-W]uRNd. QĹ,y%b_$nߞQ 9@~r{3!mm]V , K\ʬuZ䆵T04ېZ mM8‘6&Vh;TB~ݓLXƗe #yMڒ@Pĺݻj -,:l0&g]枷 tXkinJTqscsv(M(pת,/d ?| ?qk_b\a,y7X^=v%wZ;f#ifd*J-鰯0w'#XfL΄\X׶j`)m7zG,v71R{Y^Qpd@̅y|z{<) >Kt,75[K6 8A'6QȥO/ZkPO]9Qys3)٨_c|*",πey}nk \a,pj.βU3pߦ%wX[&=n|܈o">(ٝkQ|C0tm F9NY-zHv ˎjZr-c(橓k2 Ĭd盲Q3>_O姎/{g=5 A(R ?LktV8\ p lb*{G< UytH ,KwL)Fduk k4B^=ALڽV;}fBTt,ZEw)_k {VD@,0,2<5 Il"B,.ε0אg;7Gu+*~#3zǚt:$Zf|O%WO}SK]SΙrb.B˂ XL+7><疱܎z P*FRp­,G]AS:ȵ&n/&^+o%N܎+(,:'WDX7Nv^Rls${H U>rtK}:.yb(t9oaf'Ȃ>glt9Ma]$9Uo6lmu/uKօ6S=J ֎, a|[JeC12 .B ^!e?O8Pւ޹v7nC= JH Ǹ*uQ60h(r6\XC,XYAA<R`ozU.ezo6Pp[M; ҁciT m[tT c)*@!˲MNfNf\vtxfZcyZELZB 95pߦ-muQN.Bs :u7K!~c0?sϝkzeJ&>i? 
^M~5J)\}xғTښ E؂ 98ZO~(܎`O梔 1omAz$zqP}47rL_<̹ɖ", ۚ.ֺ4rIq0*::M!#;( x b>CJc:v+g:}͠7yay| 3 a`_ rgYOIFl7q- UbڏαmptNs'p1NÎ-D5<1.sњ:5_+ut=¦qE?a^ ?w(dY%<ԃ/~͋6nt"ZgF okcŅf zu/d]eZ)S'EqYn)k\ -B9a0r9ns+pArC`1>ThR&3o &"'ܲ3=kk*.U3}s l[8A8bϦbp6mTu IDAT*3dS!: g*Sρr5hw%:ݹZo־A1v&Of #qƅQksPKY t;nBS6lͯ R׬T*pE0=*\]˂ V7PҿYv~$eYλ6 GQnc_7\K2[HۆÀ"!c|fe c-7'OX ɆƋp||۸',6ín餮>}|E?*Y}-4kZzmLnpɲJMMfɆɆs*SZyqmReHq@Uh=רI' ß+uj-xVt EeAv#wq2q.Y`7@h}0Q[Q%mXEl'@Yt|"Yai)(¶ Gybr:,k-(6Gzq5e6(qwe< "5ug#xaYcb l cѵ_ꚵZ o}[q0}$sY"J 3| JzJQzNb+UpbE'$o.wT Xi1aD04Iǵ1|okԱwq)Ik6rC}Z ]Ҽ }_dXZhJ)j`C ~c֚]ZT~^5ld( tXvmCa38 ln9 @.8[_;>g[q .(u]a6LY\IvR &|e,g^D,XnG2 Ga( с"sm cB}1&1ʘۃ̄ HݪUGea#'; .\ [)W\CmIXFM7RQY1/]ԫirk]r3b'5vFpu]1*FJ'k+%7, I4ǻ~/J]hnwRf˂ l)xAv 5p2W+{XnGbX2 7@&N5B 0wxaq) QF"aۓ'vT*1NIytVZ] z:͢hŇq&7>;Zp^:b.&¦1αKu@G+D~PenRWm7LB9tzSZƏ~{_,ufoo)V %1e]˂ _2JM},ye,#8pRq3BEAF Xkܻ\N;y' ^5S L[Me}GVMq5V,s"gɦ0>QX_OYRoK2[bMnc~MM9:d5ƈ X^hI>i9=jf'\7O2"xWՖ_'uX1's"r;ayyAZ~3]꺭V Ľ}Rf8A(R ̲{ܲۑ_b8J|5^‘^J.S#L7E''1ҍ{~\͚\j0qds,[\ 93,cye`e!fB=_-Y MT2u-lֺ ^ǜ34r& ;s?_PZePr_f #]@nX²Ok|xgu=K]W('FߠAOؕ{܎dGw0̉(AX.d(ocrME0ZIx:9¶f8qt넧 Ђa4Ә:pbtװY]L " L7ҡhZi*[l,l:⬻΀od"'An~uN]5[ӆcrQ<3WڿF_3MB/Ή`qmps/뽘S720}lE€ض (&R(FEwCEnr^Fa✴ ۚpolwpK6!" TQ`_)u&q$k-_)&hXsbc3s} 9Z6MIڥmňA?`<9Ǝ}JK1ui| _.uݥ%>|~ n8-a!W|Avup2ѬUXnGb-pl< SW>U'n*k܏$R;#rx6xbW<_a⼴ ۚph'$~YnӺPq;#шY*v5oS#7?kO?2>;uC)5h q. .B ^pCXly/ze,cY3~}Z)"segJfY&ϙ"Xqui5^tC/,#r`-w5Ҧ{51>[EQ7*k =֋kIrAWXoHikq|p6⌀pp-/yT pvh6dXMxmq+ -T+bX_m|uۇÇs)u]aT* 8C ZOxN 5kU'କ=e,cX*LO2V1SEODpCmǜ`!<1@+u}]h6wNea{q HqSgC;9#c}9Sk^EꚲaI9m BW4 1GV ڐm\ň U( A7b;g F`>c$xVRYn[ĺ)2s/}ta[EXޥsY /NI-cLk\ng(c`DZwC3iVNJ &v ,ALr$i=;ހ[,7Ѫf#q|3VTp= Up.FsX05?;-} %:1|sFbX o[,l18rq?|33y]6ޟh5Bxo'`M".N<6+ -ԫrk9-?K?+u38YgUMT: [o/_ǤQ35?(` G9phS1Bb0dg[oU]eGAv". ݹ-hK..e,c8(}}fX!G:.hϱ_c>݄ |bb qg ۜNiOCcC*$&qhs'G-n6iBn: 79r5'pkd:96(7W~@yp1aXw,.8P:FWVOsKB ^ywA9ۘUXoʀ)w~Yg[ogyf [AR lg>ve,<^x/cNsIFG*ACP6]L鴪8 _R1b 7 )\aeJ&&I}:l6{B XΐzN2Эi5R. :Oj ^ܹ{2VMg0qdac2ic|_ 5QɅt{hLac lL`W^꺷q!8 qoT3%Cv". 
]~#}^mI?@gxG0Ԙl0)c\@瞷4K2 U-y l_ڽN GW 6^hFjJU%Y$hQƢ^Ͱ4dP>GV; 3c-|0@ W˙Zqwstwn$<^[UXh0ayZtC/{9СCؿ [q. !vZBmg_"i28@b# Ʒ⻸WAqơ\T*/ _cr;o<J_w8q;|3NPj%q! ]E'duZ$rcjT<aYTz(,:Kk)\_w2{XÜڣǵyb&0L:ߨb)YӢॷ|taw#nV'FqYxM {%dibű.Ͷw&,V|Xdw# | 'LW~Ϗu(-;רce%ua宫cZ'1n3'"Sl WHKzWTNԎƢPMC4S;޹ԪtC[G~Y ،:4&㛰 ߤMctj0'u||Vk/Wwy8tVVVJ]WsY /vAUBKM'Mib-pd2VCYŽ`G?B'a)^B%DGrNjIb5ͰaVb^VJ.f+i=:5V\:!N{AYU/lZIeC<kCm!iW@˘_07Zݚ:F{wi%7S>nfݻud. Ĺ,v0sůe+3gԎgGNΡ7:[p"p.rYi-Isz}SJci)²#X] 4D31T45TSʋ<"M\Y D1:6ka 3na <Q MMeLLg.bBLj ^+^*GCA$:;b>BUKQ9c31"vpѬװ<Ϳ)8U7}ta><(²1EqY~]˂ lue,t|4!>VZ]r}™عy ?{T*U3c8GMxVŹ*ֺۙ.f˙p(L+ @XJk_C 8)ۺڷ$(X E9S,7ؒ,taqu6߼$]~2ň;\' =7lCG%cA@Fh(Q(鲞mY盯͚-uM7݄R/\a#H yCe,t`<2 5NZ:GI\R$[:iB{/JDq@~ۖIoTؒv_agpzS|8mw~V{dDb:n2ͣ!td:ʂD[!B3P(WmBHEUHPCPclSkX63d^˱jK-t4 8a2 IPqpo-WX~뮻 =T% ". ®@eA ܿ^rp}+cM:O L'ۋa-l\Ucz= IDAT4GR'ʴμbWv׻c(dC&.5I&5ɵ h6C]aOfm#%0Uz!;.(Ǵ2M6}5<mtBnp͍QU*X^hBt⿏^7ͨOacLѹ 8qyma#wׂ l52삻9ahڄy˻{L_MV~s9ī亴e̋,bhRS#|\NX(@IuDlfʰDR}--G^XplӼ83wxp tk=e+cA{!t,J}-9+n@pfˀa w؂ l%2o~~N.aNo'g)SB;5lRPܰגdf韫W#K{ ;:нB]‰?gRgQV4L\N*.~5U"4[VJif*V ,sW^T|8E1"d;ios]Qͪϗ{8qMU.X$&n!^b\!l \qK].µ^+²pLY\ :A%,V6p93E<^2w@?D?Ɇ XL74 ,28'FNt3&' n `iŖ vDN 49nӘpvUo&l\t'鰹C7}(HYkQfؿg,XYmZx, м&\:lӆ"Mb_e vycgd",Oq\q'/b\s5VeHpsY x/f\민KY/'=(r#V.uxB+&g(Fש{4-ЬD燊y!*qs*YCc*c#Y1&‰dzxwR01cgu,/DXY`8NXN72CMH'4m$@B)Cx/u>Lgciͦ"r$vaaף  /SZhEȍLD`~,gyC0d6u%Z:҅㍡z湻NZ3[hAkQp|[꺏{׽nb A]i^^B/CໝSR;.ytUSNdnU"20}R̝e S^C K-Wv' ,;AW o84$OnL_El'?|ǏWpNe.eXYh("l F6r[qC9 E58{.ɵ1Sf,TKJ c]S⟾q/8!lJ] Ox^W,L\a#,frO*c\xOe('[ѓe \ <Ǒ'DṠT<7 rĄF.v}C7|4Egs&Qԥ oUMj\aWRea&sY W$A6>5pS?pqNQnpb_e*z&Bș$!2~qx4,^]o,8NXIqS q5ƅbk1f<gO3E[ pXhgԮ`p݉gO8o|\$nNI;|*aYqhwxלXsY\esQ̲cc$jt,C̉,l)GW݆i.Ә&36/u$G| o1)9 Uq"3]O.k"EEo/_X~3!²P \a#N eX"Z)\Kq=^jW0!rvYk2ytXb KZ|Rz{ZM;v'={e4@Q}/yWX;)ga>il@E)j"IapdsYke6e1e~=k$QՈ3;O`vEk©}aJ+MhPN/~[>`"zֳҗTe$sY .B=d2/Ax]Ξ2c,Nӹ $PYGI 0f}!s-s YqaSqri@s1nBԅMFPb[׬UwN6Ew0ĉb|S:p=(a LFK}n_+k$lᵙl4ay);S/?c?`T{Pe4D\a#H P-7S=ُ߬5h0n<3&eB >o#v󧚔]ɇ"Ut&W'p6 2>2X#1xz1(ڰ)1w(f4x׌;m|Sb0Ew0‰v/lǪk{c!ԍCBI\tNQce:LM@%˰oqS _:~֏?,WX+qW,ʔe]v ". 
P 3ν^)7%:ūDT@pdrCSeN.zwQǞV]{Nޱ\g$ .:GcDe :3T Nj>h,4Qʯ֢zWLwߘnHx*(Αn A?XFxfǺzȔ¾ŖSOx;>hx _?)8AR KJp>_.4;&/$:l 66(ɣ445v(ٰoOK)U//,/aYtĹ,F$AfEo8c =gEଗ5 ;)JR*i: y q# -ѵkЬc՘U6m7XҘ;*O\8K01vMS/;yyFM~{CpcuVvD~+^3@AGAYǮACONֺ6OX' h'v咛IPp;Ir,*:ay|/ ?kSW O~K]SNŔe%Av ". 0 .pU3Ǟ9qNn?LDZFy<πRf7ZĕF%Xk00aGVy6@p P 9˓hR:+ǎƸpxEZB -zoNܼI0񉥭)q. A9#~#q׳z](7X &R$}ϛ{5gOmi*طaY87eƋN2w5ƖI9vb19m٧qXr|fYf+--Iki|r5aMV@Z@؛(Bhur}2DX"/҅׾", [\a#o$ L QBS..n^fWqӛzo@mx<V;אa`Žf;je.X7t< "^Lx;_U@q]f Fav>Nz%qMp?qfǹDE0?hf 펯d̉<%>'_i+Ck7 [TCĹ,Fbaz{{~:ev&qq$ $8'eK/k0`iZEL f;jc|:/ 5hIC 1gmNiXZ0ke|䟥x;c|SYj1q#GKZt7 &bEVJ˝42oqNLLK.)u]ANeA6˂ L^dy7?2Djk3Q`.erD*xÎfŖO7N7[q!nM4LΥl bkXJ\+B5SX|eac?`繯t859\rP31<3]رu2,Պƾ{?7-u,p5QzT w8AR LEP"x{g̮cӃa,%! љL84uUpeZcizU.?Χoӧ²a1P$cF Nʱb=a`\ =ߪU$CҬvXJ cI*[3wҍSzS4`k ͥQ9ժͩkCTꚕJ^{-G |L9sY~] pgO~7t wBN[o&Yc\xkE-|ELt҆ƺvh8 j`cY0Say}7|sYV7 {J]W6B2k .AA8] ^?>^fqsyYXv#(bb&lºO_ԲBE80û~/5k:HyY+]hO/=Z~FslG8K\E+ Thw% yqJse\ZkXsS sOV]6NXP€f‼4<(Blx*nO8R,*egX ?;?祮[q .(u]A8]Ĺ,FbagR} no,ٕ# FqPQjb|5&`YkX۽BZ޹*  9R9I+ Xw<(&cGp(@3,/6Qͤ L>((Gq]s@t(&wk`ep|ѽ\֦eݜZhkXayX /K]lmo{w L)˅ D\a#<fs.?rsg̮d˦oO[Dž0Ϣ\hֱتoʟI6(Ǒw"#e"Sd筦qaea $.jtZŒ >{?=iӓjXG{nPqqfe20YdQ<:+z"#ïK]jn}{Ri!eA6 -Y/r;<^fW0iQ|a+f0$,ǟy^,=u xtLWJqH#=P3?sѨU4?Ӥ]/|oJ]jg'3E2A". P{gB7>10#=W 8wBB.+^̤tȘLLU`YGc]+( \  C YeZ6wN//4Qʯև Q3\s>1aNq_㦎# yavAJ2hk",O k7?Rםq{޳uaT*S@~]  Y/3?(8ev%X0ުKˢK+>QͰj Ӣ* a!˘(؊Xk|WC1D_U`J IDAT C:WXYh!+IfjnߋEVv4ǣ.>BAAg<E{ދcw]lSirJZc>U}^J\=t7uϹ<}W ˆ:K!`1>ϴF3kX]x,MF {DXZ񽿏J]wqqr r \a#H pNn}CgĮ3`Qܗ,2تYaKr 4&n۰F^BWG(v0t 1p"VΠ,hեn9C| Q>Zڂx .'LNQAbkõaQ3 Dhz pDaȯl)a{}u݋o~ ,,NA%,·Cgrzk{ΘȄ``NfV s*,n8("no\JC=ṣhn8](@ cgcrTt_/lRagr&d,ӆLz-0' CfvǓ٘` U!g@׾J.//[nyI A]o\:E^g,z] e)8XaGb\fZr]G׻M^319cF9HD_ݏn#2kBZvmZ0ZUk.C9G:X~3hR€՟֙7|ܰPA\;\vzY',̯.~T+++8tp; 3Eo] ߊ{f֋\w(CPf wr;sUPZuT^mF&:Ӭd_[ `YVA". Sig@l7xӘf "7<&"^ ۇtx$N8 G*^Ay<@7VP@1`l0.0)_Ab 3xG䤶~JcyzUC g1mӟL丑8/絶(Tl:{&nĵ R2X A6 j\*ӪrQdV2ԫ w01Gbx \1"g""bmZcfMД!a[yoQ,SQ1c u6h0d!',HKjZwe))1[[q8|0:R A~aY/'<86^F`h4!ۂax\one 屚>ke) P-~O.p˽C97w&?DC>A?Ko(Qwq뭷b߾}+[q. 
D\g}p[\r A p|;|&*k$bq,%9Ybl^<V)j%ÁhkS B(܋j]̝Ercئ4:q2ؤˀֆtMjꞲ~BZ=s3%ڽ^|G_{; r VA2A$<rfo|cP6 l1ü ,08Z/zA*\ WqP9f53[$b,9VfY ۑX.c>/֛m7b܀K sƇg|< ֈY6ԧB|- x>S Laey 'ݲ7_-u?7|3[꺂 A]4E~./zA(ѵ6q$+ƌLf' Kϖ :Z, afvXc10ݑn'%Ա.I&GVYJ*g we?̜^l I !@BEPQi HPHHWC!B( )m?,߹~"!^Tnkip뭷ӺD g.Ѷ` !nAs{MvQUT h.ˊmՉV"n73͆5BnOFs]2 YB |q靽 kֱ}m/ּq{fk?cq|}bs޳Tkn=oh%|h}q!4D~4D,J2[ }:n6D|hN4`&jvQi@G*w%yYY T$9z0at`]:T;A hJHeC~Y_Ջ2ڸo#eY]vV-3ߒd^L14&{0mu>kFtPdYW n,tgOX`buw.3o"!x\ $<!@g&_s400'[0XT73C(}5+S2p5`fn3ldVaw[aug= !UGh:;3̱4BX}Ί7-^!`.! 暵ҕEb.O^X`"u3 mD#ׯfo>{f "~P*Wi*}9 Qu^ςY8: ! 1*aLCZXBO6_=_4c̋}ܜϬ缠$I2TM{9`<+Z* He1gRσYf1X&YQIK hd:,0iL?3n "LRtA M =ҌZ?T['9ibD5Њ\Lnc|06a,Rg^ܱo|ifmwԹ~.kM1M 0~T+m \x/ִxZw}M7݄p8i]BQT*O?p+"D4# ne W lk>'1jecBս:[secc`M|Սpˊ \|oR|ԑoPӺDCI eMD#b<fӏƷY_2̎DwU׺Լj]1BhP.I-Q3P h1!P 73g[޹ley,ː`u+bK痳;YU>>}_0q5Fao֩2x6v1g<gϞo2ѧ0>bZ-DDed_u.FጣuQKeIYzdlg+aKa-pnPWT 5R|7\4ja$9:]qAǺ#I3R#:#]$S'SLa΂a}gӺ|0nMij.shoJD#hYȸ[vmbYIaK,d3(sl0l~ d-Qĸ ŲLpS6^}܅qeL}GH| mVl?͎fan"V̍5׀U cT}jh]{s Ҟ=Cq3X&Jg($!x%he=zLj "VV5tHkΪ}Cݐ0ڬzVgZKph; RYEg:M_$cL8^`hVa?8e>~F6iazv޲$WC1,gH(H<]kgM[\x/ڒO~?>|>KعLDœhd8n>q4},7Km5UЙY..K$Hd m^K\h#@(U`Yk#>ӸM}yJ[Ei"PsT %vWv;m9$ `Q䭥՛1wHe={GꫯfLOLD ,+8$"HL獹:+o0e>o~wIjPO&|nxh(UTnX6abm4k~yBjm9k69oݍ 6B$HQp{ѕyZ _ZdD#;x |q6vf ""ЕΣ\^}}|%n溪jڍT>ggBDCG¼3uZ.a!旚F[! 
eS5Xv,K aؔO06 _\:.D?b!aE4tʽf7fS̍ Du- p_07[kԅ`W;A-64_guV |hSx;p֭!+zZ_2.bD۩&';u,0Cs%J'[c.Rog2ojnЧlYMdYһ jaI:; seY XgNr 0 yʜl°wA[󓝯l#4e~x ֚8ɗ<{I'G?e~D_ ftljpV:YTauK=!J kf,FYs0לW!+h٭LÎ,*M>0y֝ Ndk5\zp8_% E-[6;@٬;h_)H#^c?܄sߏl`_:?^@h[qC?"/Dn- p)eD44!Г-V3az}73dM3Pm|V{0XAU]$X4hB3CE5sʝ8\snuTew![#5pZ}zw*kZkM̟["Wm@X7M?`LTC\&bL4;nˇ`LcDDJ+ogHnH32Yᓹid{Nv7H#}D,94, tr(Uq"Urf $AwXu@Yk^y1 ^P}gώpk/^`=~AK˧v>lD5ƙD_<ىM\s;Yh$sz X`j߼l5 m2x8x8;7hXPkI\d}3>@Ϲ5+Ydclt: }g'g(;HdM1ȒXӲ>nr~83,D_ A\рJ+nJ{"LΑJfc "#_?LŲ }^$IM2غcv!kȒ8 X$)15$[%:> GdIFs]~Z{uTV?;w.̙iMYL`˷0ph 7 {lYS*z8g &29fxfػ 4@,@<5zKS+B2omJk130]@!k]h.g󢑾 EQH:ma@G"↛.x5οT6X>묳p{ZhX xjt4g(ƹTGd:pu׮hO [Qy5F˼BO40\&fS>vf "OΗM( ٮ?0>3a+V#5 $,ϫi5`ژa,I r>7oKcg {ijT3끴Y_?:XW'9NhG8%>.ݣ[g⦋.'tA[pi IDAT5F\e26 nWe (MJ T#:aZ^Y^VFeueʒX-!W,'kSs~~o DM@; D[}Og$P$ MunG.cƿw\ziM"b2OtKp;.8dpIn-ODU29=0թ((>+BfWj5Ē$! d42Ketgrqzca̶a6 qA:\j[?/E16,gYWd$aq^~W˒$/YM"0\&N4t `|WfVD4Rcβ}V3aڸZi"˨f2 Ųt k97s :&0YP˲@@ށ5!f;uX((7s~Ixٰ,2J{%"nGDſ1Y`5w'*UTE+[&saHYq{;R^B<D<3Q,$Zaold% 8`ks{9d5FyZbarV?Ӛ,ꫯOSg.3s"x MWskɭM8{<VIқgWC,Y]gZ&OAc, m;r##18:r\4d=̚9&CIƅ"s6w>EA"nsoyӚ,k?yOQ_>_Mc"E40\&z8}0nCD*[,TQ>|I Fcg/i$0ohQ5 ,`wVk|`T17-e gZՈW3Z s͎ecq!)t7˿=iMEQpu.m;x =M4{:ܩխ剈> $y۬dx @56B0K aX&ЙC^aY>bFrt W73GD;JAfz4`[BЪj Q qk=_zuq2eǸxC4DD[%/> L,6̀KO #442 tf(Ww[_ ͶcdYЃ^>˼*fHPՊm3@XMohƾQZh 3n 7+X q{Z>YefND#OtCp} E: bYEP=߮TZ5$H)BP q/ф:YzPkuV+1ZÖ/ϫՀZZNYuw?{p$!S8_eBw>2nK 曱{Z;x ׺Oqɇ1!U([8flWoB P\e"/;6srY1œl~Ъy(+ڻϰgvE;;r}tuCnf゙%W عL40\&.0֭O;bo8ѭ剈JX6>:]fdAx$m;lTld|圅l$v44[qW=FC>g+yuZi uD7Td B&!ExӺpr f͚i]"?hN4p[kGDD[TPTUu#I@4G,R!-/!W@wT 5c܌9M{f/ˊb6v >0k qSyZ7`kӺD.QD'ܮ,tci"m  !7#dtM+5bB5_@w1\$k&cM@eIlhOAc, 1nӄuzK_Xiݺ:z뭘6muh8\X 2ucZf`bK[m3DjJD<-w"A3|%yiB>]@i=68K9C^y74!?Y_+SygGTMw?Gu,YI&yZj3x >׺xk"[K:S9TT*fk),^܈0F_hl`ٜldFȊc}sC,hyjWnss3,YwӺDT[\&bL4\`[slo<Pӝɡ\Q)]5Gb $IsL$I}Œ$BOMlGB`S$akT'.#xmii%K0aOQq2g&\ȭŧO#gMuky"""rI2[@T{%֝lлMBg|>gw(y2q\@e{l7&"""*z2y# MgbUU{=fmg,AbYV+33['l. t\\;Kd=ӺD>ަs?3lȋ7ADphp$?DDDTc劊t[>BQ|PUggq.cKYBf~h!dI昳\}'`YD~v+|yw;yW 4"De:5 0}X\j,e+4ֻURa?e,s-|`R6TlY1IPd ~91r2~xxo$ixb477{Z3@ro_4? 
`퀼+""6H4B=r_4Dn-ODDD5i* ! !lVײqLe*[\eg( AAn7@r] ^_Ӻ'OŋH$ߐvpk7l2"rɐA4[n-~q:DDDCBW:R 8^T>gXuv2iRus?}nsљz=EP !VH8֚͞e]pmӺD40ixV!62.'Ƒ4XOBbN9v!1"î EQzXlv7k͚|9Qb 7`\gz?Yiv -B]]uh kS@iXsWa3Q0\&8;Ust$""R"r2IV 4: }MVD-:r6f.]u 6d8kvOXp!uh` p6 Wa36cL4p~ ‡13'uci"""|t`og U% umr5̍̍B~=T\;Ǚ:={`…F%7Lv 6@-]%F+9n,,K=7&""+kc2w#+$#1O84h Mlq0^'UIdtD >4Đsþ+܅K=gΜE1X&5)= h+ȟDBn,|S1;y !K$bc4K2$V7P: -قi}Mp0Q1$GFڐБb΂XӺ{7,XH$i]"4hG,B3sRσ|3,e|w`p[ 縵Gkh EB΍z,;IV%H,>  ])Y7|ԑn &;s x"AggLԱn,MDDD.had{12^:sx80ZbeM4XLb-K3iC9/\`cƸ4 2 p梥J={1+dLDK.ŝwYur 5 6m±{'M£/w>sWb!Wa3h}D YDDDDC:pГ6X>pC"ygܕǍj$I=f4bB?=hor%A{_+>_ 3oh0cL侯pe 0k87&"""G8s}H =p饗2X&"˗K.\&d2X,Dnl+؈T:T;5G1{#g"6al~d`p]Z/kh(x{mκ>EOx≸袋,իRɕ}'`ƍNhjjPTށR!FիQW.gb͈֔D"hgxقas^%J87]p q HDDD4ȭX gv2ywBs'3X&>qgӵu<Ƣ((PUPͣyfLm:? ~?r0ʹ$fMo\c1ɏw =Y+k߃b]h(cL?aDDDDT#o|goSN9\p$v"M&g5kָV##>̠ ֝&;:1q466X,BH8 IL&i$VY1GT*k׬Ɣ 01ġ3v?\Tε2a2{F5\8Ϧo v- b܀s?o}[`J?|曮vơ&tw'9Sƫo}mǏGww7o؀N.҂h$`0@ bߏ.n/#$,B4p2{ g/e"""A@TwuY A4\uUXlu>3'G,C*Bcc#6l߇`(@$dE;(nGc֯R xR ! !Nu.^Z7 /f2;gM8dNn,MDDDD5{Bm|g`ܹ  Gu~׾0d bB@&ECC *R tPEcN25cql\B0sLh?=6߆doYh8aLcLtc9G0 J~w-Je7r;w.̙iM"}ݮmh|L|h#H$1cFc[[zjDaD"r9qh?Z24dmϠ->pppgQ 8|n,MDDDD_oƅ0Jo: ~5hhxpM7ZcbK#N:h:Tx<|Db7l@$ERFRCOOIӍvD.ETF0DWW'FL&|xwF Mn bW+ C@ah*#XgkhzvŇN;2mѲepWZ.o~?$HGGTB4E4,fd`HXQ rR׬A<^BRP( _jn~++D}7 }8nin,MDDDDWG~e`ǩiM"{=\xᅨTܛՃ! N\ŢA (PUM2 4dY$K8q"477!"" Ol? Deڊ .n,MDDDD?Ey펺>~SN& 7ny睇l6Z߇_<BhnjF{GNӃdO` hjnG}`0`(h$MXv-ҩZZFAT* PU|w>6wg\>pe sLT[hpcƲГͣ'S@2W@jP5 kyχ(}!H h(N;!""z{}\GiKg) IDAT.'iM"$=\tttVC%l@TtvubVYD>$ #L24-T*C,&*2꠪*TU$ } X,B41\& .mNe(7<ֶ%L5@=ټ۷X}*"#1FcB:/3ϸZ&3Bh4EQP(!2R4u(CP@Ow7JjBTP,!ir$ P+*갶OtHp6ZRSs'0B4UMXFWm~}Ն>cF\4bJk&6161'""$ ?=)2uQ%L/_xꩧP(~̜9voz7 .]j='ŗJX,L&x,vAĢ13iqs9( IPC8X, ͢\.# ! 
J]pFƥ$~c^zPJ[k!WY#Q"d#h6ɭ DC~kDDD-/nW`o,c8#`sh1\&~xo;K3%ƇثbcWzm#Y0~TM`rkm1!6of~5||q!xZhPU\r zmzW\|5~W[osRK 0:||>uشy3$ D"BfA('Pl>EASs6\.c츱hۼdrhh]W?;ίfx"D#ewzщ-epw;_~VoC.jb1mB @-""~W<_tu !~߮u,Xٳg] .k׮駟jDB|]0cId }Jši$HPU~>E D"fr$CU+I&ԄbOKo/hOYh$~D'x7>C>Xք~/W1 ,~s~sU-y1u\ܨ}tߞ qxZw{i7x֭CRӦMî Yp{rWW9We"㴣Asԏ|!t:D <ʕ F57!GdiBailܴuufF#Ch Ѐw/V\> g2.mcDkh(ߵz&W,/ϭ_RvhX֍5mw述2nTy1PBDDC;u nf~o~}5j8̝;fѧow5YC2D}}}M r;*.2걭1sL U{/ښ9Fce̛7wTD H!4d2Y!4d*DLhlx]ݨ !,s(D"ؼi;n~+>D.X7>n,]Vnĵ'Xsx0 +lfL,[(U0Fv=;>/W=q-`֬YK.she67nܹsqM7q4mѓO>뮻뺹ٝk\?7s*1 _J,E[[;Q=h.j, L0~ΤєhD&E2D$jB3f4ΎNH }yNe"O0\&vzѽ&Ա͵^voA? 9k{`\S]5l5e&lr""xBy<DpB瞞wϟi+sF;꥗^e]+c˷z+~aWk5y Ir&:S97U@"DdaLƢ'Zj{8LlidLDDK3ozZ7[onic'?IMKx;UV3@*HO=|j[=䓸!pƸ:|PA@>(>AQdryhP0@ |AZUU~464'$Ie_B ^G.S㯾 HOY?e2:cҠ3yCXᦁ~+DҞW#迸&̚2ݼӘdDD=Xn]]n6뮞O=T͂ex'pꩧd-6l؀>۵`!,꫸] "Q{ tt#bhi MSJPW_RP(MhX~=|>?ZZZ҂m2jzzűbM2e  ع֋0˺$.Zw |U/Ͽ??omCPFcfՅsbƾ׊H$\ᦕ+WF>HЏ3~h((JBhokC$E&F"!\>h4lVg.EcC,  )Gi/Ō+W+N#mj*3NƇ1/zKf x^&&pD̜<? w&{_uxbL2Ӻ_ď-5]L&s9֭s!um޼{.2k5>:b/3 >\.h4T2`(\6?DSl-ZPBhM,*P==I$ r9tuwۻp/B\Mp1\&/zo'^1*J!Vnʍ]q=m"6F5p^30jq<ovH$Xd &Mil2\p삦P(`޼yx=wϿoȎ]IR8sZ Ypg^N jE&o@*¨סrUU* ! P( n&W/Yyn_4}"DU7=rT7TBk}ϻ}ѠV,W_oManp; DDCZE=_{ӺXd vqGOn^{ CTre:]R_^{:n!9kT* /ĪU\vqM(?~<* ŐJ%Fj* =dBVȊ ( !JϬ@G2/vK4O\5 jh}4v_e? 
ܰY,0܌GCC ~?~?E~wTϜ9g KHT@0~MDD4QD6Xb|$` ҋemz=cNQakvŋdÜuZ߹ld2ﺮ |>/{vbT/8QlR#J$X"`)c…iӴNŎ;>}:᰸z_,Ri<֭Ӿ}{^xv4Md֭BsFF`Gh4( x@0@vv6xz/E$%ɥjшjQ)fCUԋ`P%2D#˒tjE KtST Ec<q8dXh߾=;vcǎtԉ6mSf52 TVV_UQQ!tփ%l=X¼7qA6 ޖ !<$I'gU~~> .0cϞ=L:`0(,CQ "R:O?4М|^|E<t]穧bBs.ކA[c6u*#$ "jAGfuu`0r16,Jѱc( XaŖl;$nX#R{ E$˒tjΈ5 wS+f_~ӇN:($l1=JAA?gu]Gra=ѣG9rGرcI#__cW0[avo(6nQI"I$?u$-[d…l2cL2E1o-4Cj>^~ed _wѣM C$&Olbb2X6>?h,hBQt]#p8 vx"AMM _;`,IN<+I'/(Z5tX1glʊQGk`)k)s 0į=ze_5Q8Tx?9rD=sI\Э VU~oJ$5H,ΌWŞ)mժ/)=b„ tu҅^{ Us _x'f8-ZD׮]戲|r~ hŝW_'n@ p8Ʊc5 ۅQihh2t ٌxo4| IҩeI:ySyɾCz1K}h}y`$%yҺukFɈ#8p M ٿ?w>kϞ=48 fh\ܳl!I"h{^{4uּKiѣ?9nŋӾ}{9RC PUyѯ_?9lذD"!,#m翆tMTJ("f(Yfp\ldPQQNvv6u&^/;udo$ɎysnB{~ 1$d_tѴk9Sd_}~fUyI@9r$#GG4ٳg?w%B1ݾa;0WZ_O@ItƸ륏mqJs۶m… Mi(..fTVV q:,Xݻ ͑uq=i EQxꩧv8={0a9*{nˮi:J0$cZF#L&4M`2q{<$qb0FXTv^/p@0HNv6Sﬦ'W;H]RtI:9&H EWQϫ‡_H}r뭷2f̘`v&u]xMֱ :pIto"H$`$`jOoߞ QZZ)++cyիy믙2e hThΣ>ʨQfRRRm&tldȁt(l8Ea46ivSI4nw;hFeEyy-ØLF, 娪b%%4 oza.Ij'YI:9mɾMC=mOhݵNavsM7q뭷һw^t4Mѣlݺ-[e/dC{^9s+TSz $IjN(S.eۡҔvؑ QQQq8~ʼy8sH?~ 3fছn!Jmm-cǎ1q-{_M`pU.]?~Z̓b„  QUs2`9RPTTĸqㄷS?~<'N!J(bĉ޽[h//K+nn P L&PHvvH4BfF&p8E^@x"N#.x"ް]Ŭ$^"ez!$>Y\3 t9 _Y`KXlٳ'wW]u4r$ |͉b͛=]ZqI فΦ^$IR Fs**Oin׮]?>OЯaĉ>|Xhlg /#5~픖=5p76-gq^֯['gmK6l2cZ):vl(ZB$ sbBAt]bZ,|~t]' `Q*Xl@)F"IKʒ4f'󂿺/w_=$`i5S.֟k={2k,F-g46o̖-[meW.=#$I(;/3 IDAT`I/R@G̛7۝Q__ĉ9pēO>E]$4Gj^/ƍСCBsJ}Ѵ:XtМ{sZcXX~?P-\.bxIyy9-Z `tL&#xxDZ%I.bҼ*={^r4t|>&M޽{(Ÿ' &4Gj 'OfN]O?d#… yWfnύC{`6P#Fuh4FuM5x$INV7pLկ_?{9v{JsOG0dԩl۶Mh`GaԨQBsA4~VZ%4K.KiuJ[f 3g:ܺE#qQS]$FQUUU4@ mN Āٌ%q222̢~N /OҌ,.K tOW?> {S>ܸ'I}႞fw6n_Mǎ2Ij~gSUۅv"ד>$I~He}/,ǿ߿?>,6-#3}tl"<=cƌ#5=]Ȓ%Kxݨm6&MD4*nniU5 NUe%p8B0qc h rד :, p͎d̬,\m܆T \㊒dqYNNw6b`SPNcұ?eٸX4<ܧ3=;6}Ͼn.O?;/˓DyܹSb0@ޯۉ,W캓$I^y;^XBQe]Js3ߎa?j ]o~ڵkG}}=&h8'#j4]GUU6pGvN6HX,Fff0defR\Rh6aU-|~Vؽa`IIώZ&XX̧SX.3uR&Ϡhd=h[skMRjdgg3f̘/OYjl6-]Kz':bD\ڧ#م'I٫DŽPR]AObXR{:b3gLIa{o!v7o^+++:u²ɨpo( VH$LBӰX,*Fňh$EujK<@4LFUUUM&5k$O:tbw fD =}'$=x׈+'C '@UՔ枎x<άYXniӦ_Bx<,]s Ͱl̝;7m~MOM1͕ӣ] XTM0 nDU-dgu٬ B9f~?Yq,~ &- O1[+_;pW!$%˒}Lgv ~({cXRד3zp\,w.K]v7qFٴiVbժUl߾d4M˽E|?S.ֆ:3g)Ht*g K(4wСms\|~\N'^]0XVN@baNIn^.UU@C 
DɈtD#A23#OPIoĶ{xBd$I!˒Ӓ^\rzqov_>EӒ| P;3 2!C0{lYz5VbTW'L'4:º]GPMFuo~g{?d$Ij:xa u^z&iƜ9sX|[nI& ϑݻw3c b䞚W?_~ Ν?un:Hp$B^^xq hN$ftSYUQ! K,#PV^]@H~cI2INw.dh<#o'{ ֖=c sYJ7зo_̙3gڵ'Zh7S.},j#w媁X9$I'*a KS{W裏b46OO<ҥKgtML6';JgÇ3uTAN"曼B3溋ZXULUU)$##RvBuu VbH$Øf6;@GC4%++M|k!*F*IP,I?Ibxf-Jm0o? ԨcQ,;Ø1c3f ~>V\N4m%Te}?lk\Fƕu!!$Ij&Z]sfԨQ̞=GNs573{ Ϻk{ea,QZZʔ)S[[-4C+V h//dĜ0i: , :XڲhH$$i ͡ ?~?~P0ħ;p c#"C$IJ=Y\ f5^-5I5 ?ڎCUeI2 tڕ]2yd~?V⣏>bŊ?57J*ٷ>XDž0z`7.Y$qT1iߨ{ߌ3|0 /o1cp}Y)SPQQ!4g̘1i~ӦMӣG.\ݞ;v,GaT tI/zoEUU6r: |7`-[RSSCAAEǎj|~?VF("xTzY+BO z-IRJwS4|OM2_6϶czmuu5f$jkkYb}V>1ӎ1v&Nfr#-,|}iWX~wxꩧ <'|2mwJF4fϞ˅oߞ_~ &3ydo.47WOFKaa! MQsrbĉMIFQzX,FNv6xϏnh2:L$!ꊭD`"IRӓ%Iba-Y&K>hOr-uuu&}&$Ln&n&0k׮壏>bTVV6'4l?Ěp-יӫ]>iV$4}}8.%4[oeԩiWX~SRX8p,,Et]g^X_Lr"^Xr`Y!p1Zn?' a2|SS[K0l6:VՂiDb&h لCU-eхR`4,IgzW%I8 Ӧϸ@{-cd^``y6?if_8بn޼.]$m-t&J$l޼e˖Gq@85gs=ЍQ~$Y|[´&}O;v,&MJҥKyG׏U/:[,^ W^m۶BsDu_h %-ZDX,DQX-V‘VNuu an7@hm2Uj0f`2Q#+mlw!$5<$.d^ka.=s K_RKM\97Z%wp7 iժUR#IgEQ(,,dذaL8뮻-[R[[KyyyS/O8o0oKx{66+Fue`m3$錳q1!ã̈́ 8qb?cy9{f ϒ{{NhngtIhH/7ѫ] n=N,h2ǩ'99$ FD"lq D1fC1q8a Îj{X!7kDHԼOm7'm2X&ɼ,1E;Í0f:vIҙ-;; /cr7GuuY:g;Tד岓qʶtذ_h,MƏvUVC'yѣp͑+V?Ah(N~DDž0h4 TU% bZbhjkfv;` `wQlHmm-qw>IBj0[d$I͓,.Kҏ<L-=ЂK4gzE]D"I:;pE1~x~󟓕EYYM4j!6)58XVfe;v!J곝r 5}t~_43֯_o[ N:`<lm6.b14gϞÅfsNON<.}nrبجV 5DF#H]b1v;u\.&3 -aUUq EjH<j r+!}A/IR ˒5"~H-l`LFEhNBqQ0`C I$f0cСL8ѣGx8~8M[LB0s=pu ;Đ<lWenD͆djzq8N&Im]-yD"a0B締ENn`(d&Gt ?@/$~o@Xd$I͗,.KҏЧ߳ꘒ{}Ezm^n$1 3l0&MW\墤b^ʱf s<(,IŪpkG{ ̜9n)ɰuV.ѨМ֭[K/#4Gj>8qз~{Z^uu5'N*C7Q+尠Z,ddd`‘0.Mxˑr'JK3_$I?J%MxcEQt@w*=ڨvܙ*+$?1 lْ.ɓ's饗b)..)@QE-K6.eKڊy)/,\)Lm۶1m4aZlɢE#5L8 >Y7pӧOOۖU`)Spaa2?[f\.755hfP E%4V+c0躎h$H`;}degt: |_(v?@ߊ $eIqMY&F M41 8PL[n$)  1bSLaСl6m*Hogz8f6$-jc%AX`0Cq5פ,3YvŔ)S[E,ZfV2i$wd1zEIoc{/۶msŽvc2t*F-Buu56 łn'bw4(b`4`tihpb.>ٸA'I>"IRzHNJ:@ʨ!}tٛ$_R?=^x!+V"I#石d>3~naTn\=[#$ڇa]?'ˤI1eѢEnZh|B!Lۅ 4~Ypq4Mc,_\he:3 "(Hh4Jff&UyX IDATdeekZC  i&6X,F}]f hh4J~A>޺z-U[E  IRڐeIa 4;L#ބC_Z){lܸQ$I:]X>|glӦW!$6b;SZXVٳg3jԨԅ&Ɂ0a^奔:YYY,Zv ͑X,w͗_~)4w̟?5}[Q=B3.ю]؝D,FfQ^^.`E$RVV_Wz-vC~W_xv%akHRZgy⽵)4.Kin21~xn.\H.]H͇}>z?5npEEEZَ;xwxwE&°sе<ԓt֚oxԾ]2L< 
6,PRR㩨8p`z!4Gj^ϟϫ*4#''ŋӪUK([neʔ)b1aY.F Ġ(b18v*\N'&`0dPS[bE48:hx<0O̤ B(b+s88.2D$?9I5* AVQvbM޿?I^$IYf o˖-#:ubq1I!os[L̟'.&Cii)<hng޼y[hԼv v*N$)m$; fԋlO*cOʶElqQݴiݺuK$Ij*>>~?o©f#WׅG^$5+r3>ܐLUUy'ޘ\s=矟d;ȑ#Bsf3O?4\pyټy33f  0 ̙3K.DXhhӧg9W_؃Y6^/xaPNCx"h p8 77H4 n؝l2a61LD"QN t EHtfeIajb6:-H}GÇӫW$H`0PPP#0pT-} DHth-Ij:)).>qNY-˒t`ر\oYfѶmۦ^VRx/`2ĞRuxfMRX7o^Z ӦM~_Q̙å^*4Gj^ʘ2e7|3v ^{5}]=!{)((@4"UU!,V :^/>M f3VA4#';ҲRl6;˶> 򖊀1'$Qd[ IaS<.;Z劎9mmk{Ó"IMff&]t&M/&Hp"SIu/wva@햔tkygQt/eKP(Ĵiؾ}ٳ9rywIIМw(JE裏x'f7W$%a41 蚆b0p8lxXTUUKMM Pհ3df5h(//Ǭ,۴=*Eޒ 8$2D3#I?. OtnscNQQغhnuؑ1c$}M$'@6m~Ƅ (,,^ZT|0~*oVn2d )hΟ~#n_|s9'DٺU.|P:L"СC3g4~0_p ZźG~|)2D3S!I2Dح+;OzBnF+$)YV;<Ǝ˨Q0|DB{ L4`r|;_<ݢ&/aЏ$5S;RvY`ݻwOin2DQf̘3gL>ҩF}wğwy<ӘkߐvӅp-fx_f#HLٌf X, ^Նi#8]., u0`6lAlV+8].Ut$Ig&Y\ `bUMJtLR9\J$;p뭷 X$Ig| &MDN4ǫ[>ذP4Fۼ %IDt}~;,X]47bX~{nIx|$ >6l 4{ l{ʦI&eXU\ڇ lV+z/>Ʉb]Qf( ְ1'';h4lƨ(J0h4b2ʢՊx<L&{5sIl,I?QSNhW'᭝cBS~d„ V$Iҙl6ӫW/n:V+$<3f"Q6[kqLL7r34M7WlޔfeepB:wd<]Vx֝w-"4McΜ9Ӷm[,X#RMM 'NbdTnpwDUU5bYq:N0a1Ɔ]G8¢Z  !RTZ;&:=f QĒ$5#IHɄ)oӶ|vʪOuyyyItfF|O>G)t↋z1z@7VYJ+YuJsYp!۷Oin2hƃ>(0a,:=oМ<^}U ;ؽ[܉ peƒ<>$Hv(hgH$(B,#Ht8tQ1Q 4M# a6P UUU(WovOF` 4D3,.KSdT_34QMةPUJ r[$IIP\\ooqF[̌Е/Mǂ^t'4x}N\.\H۶mS g㣪2""R;Ul֪JXDzۻV[.lZXnTEQYu&YDښL>G$\eqmꫯ /~iC?μyfB!,X@߾}dfbٲeBs~x< `=!2 . K,0 ~?іN'i..&HH$hjnFt<^. 
2 B455tdعs/|4`I r,$_ pm.l} b&6}i̜9HyAN9Lˆ#HRl޼uIN˚kVr|nzɦ$L4'핛s[VV#TH$in.vɤ[G q8χamgDHteIڿn\  ]ߎ Q7뵿/ \$I2UU߿?>&M;v(Ojgy㳍BbB*Ǘ-9-//Gg9g6{//kCҥKۅf8N|A>h9͟?{Nh>e<;Nq:444* ^x,d2,BU~?Ng똫h"4M#bA(Fw((OϵE^X.2DC"IUؑ{r x"1vԶiH$cĈL_H"ŇkhS"q,e&=|5ݻw=z4#ض< 0anTU%u?>*s#G _={Ќ}Ÿq:Ki\.'8}jdYN'mitp8q|hNqq1X|V,|/EHtheIڿÀ_*lpnx `Ss,zO~|U¢( zsaՋs%c]EzJ|t Jg ]*={RUUEnrlfܹu9??|Yj\s LFho~ƌ#4Cwy[oUhF sTWW3m4М3g2qD\Yfa"+ƝIjea~4C<'Hb.;v p8x0hyӺ/rOexՏiI 8Ȋ $%߹HUXn](.*ݲY/jkO>dN8H$mÆ .c̘1dY6mT𳙓,nsdwC%AJ;t*_aEߟ*JJJrQy~a9FF;SQQ!|t̙3 z9޽{שk\zpzdm ]׉DiZQPXL&K(Hq|475J&IӨfkSvMfl *2DCl.KUXϮŔk7˶Y}~{cԨQ\$IRu֍s9/R+'D3- ;k|n^7JCP\8iٳp8LKK b&.tu6s6mT@0o}κBo,4gEH$$ߙ_KÔ*TUa>#=ztW$It<#F#F͛E/nwC7>m[CMK2y/zkZנA?>p8^{s 'prgIG*bƌ[NhΉ'~>d9s&_~МsF1zH&D[ZZFdE&tdqh%b&PMӈ'M@4[߇?,^dJ%eg"C$I $eX&[c_/hF$cʙg… Yz5^{-]twYlG]{? =TDikڹ/Lee%P(e}s=XX>eYzVZ%4稣{(9eq-gb Lj#b6**`Um!:dÉmtRihC׿ɬ**6qD446b&%%8.EaŦyk~ +:D$ dsY6Y;Ѿ$I^zq-n:{1N<|t ϼ4-xwQ#O1e΋ݾ7C a޼yv7xo] Æ 㡇͑:˲[YlМ/۶yy774( ͑ND"m(n˲H$Yfɤ3בL$P5 ӅvzMx5B xZt$I7䳑?*,ѷ{a[uL s饗 H$I]9#9眃mlذ IDAT&^x Cob)oM$Ss{QG1w\οGb~m~_cYМ!C0g|οY8ms/ ޽;UUU;ӟX`ЌJ9Dy$HUmyq:82,Ja,0M~?Plx">?%k=Xg*$Il.K] UXˀe; ;K:KEE$Irk׮; uƶmhhhwY!/Ie j [L"{rg裏f9}ü{p ˃ b޼yۀگ?OB3[nBsD[d4BL,i,Fu ׇ|{S0 4.UUIߎ ,  ZH&d24UEUK>@$ #*r bHt TTT|r/^y睇ƖUK>fOr3gî|$CC4_`sÇg{wٲep 1e/!R=O~ΝK^'pm rihllq uM"v޲eq2,. Mt֍{pX!Odhjn";Zw8d ~s-I 8ڽ$Iڟ$$IL*v2oa0Q@C$e2zEP$IR)B>}0a\r @jbXKk7˶ٸE].zP3ylݛ<> ^7O>a֬Ydm}G! 
͑:W_}B3\.saȐ!BsD۸q#W^y%鴸hfN&ӵ+X ӉN׍at&$I|>x21MԄkH~Fyl-tLQ#pSd$Iҿ"˒Wst9GȮf6E:˲ꪫg&It0 r)L2Cm]xmTUa@b~B`Uc^ۧF]b j2М={RUUE._C8.4￟?^XF.޽ Ѷ\+ƟHA*B*0 EQii* vpb&i*c0hF*0 ʺJ(X{kɆ® EH$;$, g4Mca(/JtP4sE1aEaÆ ›W"$|v` L%r.s'HNsG\.WNs;ʪU1cTJh7պv*4G|VXY.U;O?]XF.477SQQAMM UU׭ hϋjcMUI$eI,Z244Z4MEAp88NFvjaKWEH$;r$_Ng.gh.g MM}TW$)}ǦMx9#]&R,x}9o}>NOJ劇!SN9`FZv-3f :2*++)//#u>ׯk~cg̘1B3DKR̜9۷ ͙0r=}dYuŲ,u{q]1lpp8PUߏaΝ>zAp8L":݁!Lp8>(?$IOdsY/i2Y d:R{\'Iҡ1i$-[ҥK8qⷋ~ Q&k5czfҿ>/bOcKNsO=T~lcyÆ L>x#ڢK.TVVҽ{ΦIۙ1cDBhΔ)S8q L䦛nbBs|0]$Jb6Pd2Iײd>?˲D"D"؄am8tM8tp(D%i8o%p 4pIJŐ})1v~WgKޱ]?~<~xW$ITEgϞ{\zx^֯_/H_6 ; =rb{]3~͹]ygp=͑jN*t+@qq1WhRQQA}m\xL>ڶwo!4g>L8{d2R@4hhlDu˶ijj4 nPtL6 O$i4]CUUl&J:%ˉ%Z_@P$&˒/=q[2ɲ~v3裏$I gԨQL2}m6]V4gyM: BH]Tkyz`ʧ[2eOQQYYɀHOss3SLaΝBsƍġGgAYt&eYFtkidY餶˶PUt*öm"(L׃5}|>]N{c"BOo(Ioo$I#󖿑5 Ƴ!Iqn7?Yl ƍ+h[kٷ8'yhI]AeKM#W<9n,?;(ۙ2e BscBs'HpUWuV9Fo./#<"4cP270b-1,B4n7Á ǰLMR)K(65ӲCND"A$EoQ$s=0L$:1IIʽ2\ާo\mXiG^;|pN=H$)B߾}O~\ufy},l__EsTUpU4^˶bN8 & 򵼻}bu!$I!˒~ߣPaTU7noi~qy J$r8`ذaֲcG7۶ٴEaW5t QRDOə;2oDm,tMX]v q\<s1BsǶmN zMee%EEEBsD۶mSN%76 z˦(s=^L"HгGtACCEUUinnL&1 ˅e׮]t-!v/`l6WU; M@IeI]ޅ{nwkFtnO*"I~\|Ō7t:͆ 0˜+|n'è˼__neܿ|vĉ Ȕ)Sؾ}Ƀ>q'4G|lSVVFUUeeeBsDgʔ) 5sO&uyd28NҩEEEi\.'p:ؽ{T=zNgˍmhF6f|m6ar91 =Qx%캀ǁEH$(N^#\r #ױ7?˄?/H:ŋy嗩ň#8餓83 .SggɊ֭8N;sOw95LT&^x!f*ŒTTTyf9￟#G ͑:'|9s(**bOhhx+7 P NƱ&ɶRQ{itF#8PU0ߴDLlf._W0A]G4q<="bE\ I!05JX:t{x\Ƕ]AM}ߟ/B@Eҡ¶m}Qxf„ \~$$Jh"͛ի] z]L9c;cw4-DUU1 G2rqIh_=G&&Jq{@QxҥK)a( tUU0֥xy/OmsBeF"C$I:!I:aCQV'-]O$־7S3gjCa\{w?׬YO?ŋ,ArrT$冮 6I&1j("6mwYΚ|y7Ͻێb.fd&.Rꪂm,b1O横w駟.4Gz-nv<@,nw}WhΨ}9^UUH&躎òl%}g=F*"O (p8q8tx˅($SI$PڽFiyY{3H%I:l.KҾFsZRwyIcegm# X_d ?msep6v޽,]JmF)//P$叢(ݛ\x(ºudr{#uUlUOy8@y8rM;R#_~9ӦM+ωD3ff9pw0zh9R_Jon^|2re,ZHhdr\.pd2iNKKpq&,q]؀d2TluIiZؖEs$aC!bd"/+@$I@,Iuh8_\Ƕ˞6ζmJyzTj_~F6eʕ<,YUU4h*\Bgu'Ok׮TWW\Onm⥏. 
l?xZL:ɓ2eJsM&\s59u-0~|H5k꫅߰Ǝ+4#}Y*++f مKF\.b-14eH$Ri|>/eIg(-adU!HN, ӉvJ- ʯjy ܁o$IH,I: 9yث06Am}I&QTTID"\@4JMM K,ݻwӣG.Igs1ձm۶|.1pK45eue1d q'"gڴiifGJ̚5O?TxM7y',dRH$勦i?ŋrt:]Vlĝ fOOiI]˭z՜7g̘Irّ2 կ/ 9Hݻ>}zޟK/K.DhF.||BgRL=TB?lp]z}n^/`H$B%F(&N)++CQUb8TMӉ'446AU[~TU!Lz]#}"$ID'%i_#isS#uul4ǒ|]0a}؂SO=˅ڵŋ`jkkݻ7%%1]]2~x.2~?6l~Od&;3ֽFܼ+~kkYlo>@x5\Å^('|2 0-[PW׾5Mز_ESK݊{ c+7sK06:.袜fv$4馛?!G͹/L6+HI)EQ8Yx1.~A6_|ɓx<.l|n!WUʯL^]<4eEQ z!ms=+³."ʂ+eYzgyߟz+4G4˲ʕ+vT_]up(LT*Mgo]{r}; #H`. Ǎj456;t D4I457( >HGki63S!$I"g.KҾNGzk*|~[^{G0vH:n.\2?>+V Я_<)I =z4&MvZIs'QKM(-ѣ| _h>(z?]hm{w³&Nu]'ˇ ۶x饗tޝʂ_k&4#0)N'xxe5_\%w(_-7lCou͎

u"C$IE6%i_g'#GYp0muw=-vv֬Y\t(PcrI'gАIJ,֭[ǟg-Zavax<$;]ta\r%8֮]K:wYmHghv}L%k:> \p8{5*/_u?F<iRRz ;wBQ[O+itihF*Bu4M%  `Hǃ@ssꭵ® HrI|eIh./)[P>l&3p 7SRۗ.;:vܙڬzS]]MYY=zR ~\qt҅Ғ$5zϿdúw Ǘ~W p_N￟O˽ p'r}t:gIφ +I^[\wyB3r)Sw^arߣ *tFiljaZ456p:S2o`\N4M0!Rx$IR$k 02%!?#v7SmkMCi :{ƍd2.MLd͚5W^,;0\X)INe ii=<*qw+&>'UtpI'4osqȟ۷3uT&O%\"4#W^y%7oӏeDQZQ^ox˶( F躎$5n4`0H<,|^/@uE0t)BcSkxB I|eI|B ^6ew]s^{e;"PUZZѣ:u*={们6cҥ̟?m۶ѭ[7u$|s#/gĈ԰mSbδlnsbW}]Ä]ۆ%ȒOrZNinG{W;s1s9c?cv2'x UUq~Ԉ(Xex^,BQ};d2᤹ASSgÁa~ҙ 8 ]i n IY$k,pb>>7{uGt5Eli /P6ͤp88裹7n7nxF0Xr%O>$K,AQ;0TE\tE7֯_/tVGmظ?XMzv)ȇmÜx|9sq4-][nEC2{oIH$ԩSٱcМqqM7lo3jX4p{̙G܎{q 7o,va̟?`0(4G2 f>cA?Ϝ9sf ^ON=34M2 *e($hMMD#NlPU4D#C!T$/,[d$I*P,I: `EQ8zP|DY:e͵=S>|xW$I(уs9+rvACCߵk/G޽{ӥK|%Ixɓի6m)euJ@srQG廔l2~_ ׿*++ BBsɲ,~_4hs=(FvmB3CPxh妨(HcS#X,(74hD(.dzn" 1Md2I:& J%I%~B0/ }-l"I$d|[蛏63L5w'x"#G$q\qzd21M3ߥI:O?Gr1`t]wi.c=+aÆm۶\)J0d޼y 2$ߥO>Yf ߻wo(..#uNm%KիUUU Ʌ?뮻NecCS  fm۶*aD8N^nd2N$%%qTEEQU8bjkk)zXssl/.w2$I:)\}GmsxTKz1gIR(B޽9s)--e֭ybrK<444зo_p˒vQU?K/#Gk.m􊊊d.|\s5bGуGyR9R5g{9TUUlI&2.Oı,t:E@Qu^2"\¶,,HP5 *a&i# J&Q®kW"IY[ICQ kwttdsYgf%%%\uUX^z; O744Cq1pK/ ?(I(iŋرc]R^a4hPK9 Vꫯ&J )//hIOSO ̝;ݻ Ʌ{rW",ák!t/-&5p9oGb1[G$n+(8NU ˅eY8;'TUŴ,b:w߉4%Vl[إ< Iɓ˒+|pnxܝ(ܴj>}p J(׏ &peٲe h4ߥ֭[yyꩧD"xlW:4у'H$w :b0`@K9 k׮e6cl߽ ."bE1{ ]co WM5JI0Q4!6@u˓=ȅa^"wwϜ9YDOt3<ԩS}L>~-d2F_tlw( ''}zukKyRUUx@@`R]S繄! Ih$BGU5@UTRej*4躆eYfU똶ݟ.}Z![2,WY /V2@peQ 7n-⩧bjmnذS2p@N:${/-8p > >g}6@އ*A]]fbv+w)[dʕ\zr9_sjkkihh`w5Gt^/27|3m4kζ`YW^y%W24䠨*ݺw'IaErJ(R)J%MSt:8ؖEsK ( ;AnO(ؖM2$y.ɚZzmrE_ֽUBL. 
Uǖ+|uTǣ,+?|>%I;<*bSUwߝO<38D"իf.m^zd2vm7xb0|p~_.K.Tu:m޽;w;sK"WfԨQR)_sihh`]v5Gt^Ǐ橪r- 2ķmu]x _s{XEySʑp|>G:B(aD"477ӫW/El "|MHg2aB\6G<8]*z_4{sp!\~Vkf/7wa\r%>T$C aԨQ 4T*ſr4g̙,^*veI$y䑜{ŋ>5k;SK"~)#FD"ACC}5Gt^K..I&1|p_3yg}9N`ND(fp=`(D$!JF) JP4M‘T .Q]]iIU2I.>L<CӴM;!YF?/-C゚(BB|՞ ߡ.I؋&2?ѭsFAss9X3gvݷc֬YèQ|_2zhN=T_3Gyٳg1ww~_jj) qf2mar+<G#455  GE비B!^xgo~>!BQi,WN.WxW+~|_ `YǏB_UUU 6QF{ʧ~Z6[[[ 3gdٲeлwof% r!pEQ__G}D&)wYIϞ=at֭cĈl Hӧ{#:3rHZZLpYg1b_3{)Ssj9/q]USTt:M,oo2JD"QlFUUTU#Rmt=@ />G4‘--A$HuqJմy%s]'BJ#e!/Pфdw+Wf9ŗg6r"4Mc~tIU(7uL9˲exxꩧm>}]Y >]t;3+V}=Öi5k=z(w)[dÆ 1/לp8=>kZZZ9r$֭5c]Fo/K<-.H8iAKk+HfTU\xB@TBu O"$ϓfI&& w[\vM N8Au›S?)9o< \gBT2i. Ue= ȴ,46PSN9 pEH$]f3M'x:>YfN]ߙiy.+vuWf͚E]iiiaԨQ7 og4M.rVwM7݄Uk׮.u}0#Gm2F<# .(!lBU4U-Ն*lظt:븄TU%Rڴ# E1- Txwț~}{8 {-LB|UYŊ\tg;Ba=dڴi\ӧWls䣏>bܸqۗK/ 4s}QWWI*/_93uT<@_sDU*ꪫ?k_kζѣ}K vUO0hF5ZA"XaIg)E<< pl&~_0iZ8C&! R(C'/<R)p,Py' !6$e!k1*hrYb"gy&c{P(3l0C=$!:to߾444PSSSRH:/?5G4L¡k\W5g]w{P(0vX_(P:D,s=,ˢ[-L&  hDJ:.LD"A05b4$f8uPֶ64]%_f9 B@B|Uyb4e!ĠA뮻Xjwq{wK?1cзo_.r/^\\ׯ TWW-=z4+V5GUUnf k3gdŊz߿euȻ˨Qc=/tI'+PX,r嗳h"jn IDAT߳Z~#:?444F{ݻ9ʬYxg|^9l] :k׭ò,QTP(H[*eYT%t]l&C.C4 6nP(R(pRɦֶ]ͦe  ekļ^0x!J,We]iWec 2,ķbȑ礓NÅR3gd'? 1MeP9˃fհ5XŕW^{ĉ9c}?ϭkaqלmeΜ9f޳؏p8Ģ1Jv ud((= NbRiB0i N<# L&)E8BpB=sixe#pBIsYJbL.+ei. 
(!ٳYb7t[7x?sײf͚r$TUeȑ̜9h4ZreY?~qq '#:7x뮻כB2eo?2^{)Sѣ&ICFST ]B5<#`ZMx:K$ŶlJ&fs$I,e/1&7n$OmS]]iYdE.|^ !DW$e!VeL.ܻ,e!6_mm-]v .gg?'ʥ}ݗ;?Ć\[I~^{}?7 CpT*1qDx ߳.2N=TsD0a5n`Сfl+-bĉ}ѪX B"!DT¶m mۤix˲I3|g躆tahkkX,FF'{AX$TrPU9o.)ڀrPBt@}#"*R&s\TUeذa 6 6#C矗6o<͛G}}=gy&sN{):\ӧ&L_u֡i퍓vډ!CTskW_}QFqYg#:+W2vX'7Ç|'.~0g7n LR, esȨUSQPT&M&%١GOLH[[F c/2,b1n6F?C+S]zGo_ޫ\/?`;l2*b8/2gK~R#<;:"'Ed9䐭Zr Ǐ*ՙ_s={ϨQ|OKK9^x!#F5c[illceʙG|,LVJ8dxa,&qkj0"xL&CL&Y4bY8e rضkK>ፏ|q^"]]e?'2 ϵ: vG4d."_3\.ǘ1c|m,+G1"0b]P(:%Dn(8he[]f(bqR_|aFuP(ixKh7fn,\4bIsYWr%\~|^v ^z1qD,Y~Ud3lڵL<ڋO?^zb'Ey0XWyoxg}:=ztE[G:K.Fcƍ%[mdzrJ_sߏڰJ8Bt"-;N.#*e hnj2-8-XI("ڧ-qp\]4sYۜzmMoB4zrWTs]oӟ9sh"JuV68gРA~l(Et7kKLq]뮻 ӷo_NB9c=ƣ>kF^8zTWU H[~.ɪ*$--A:fv Ji )TWW娭Z8d4 qxwh˙~^jg@!':!:zHsYrӧ7|3+V.wI3i$Dž^[o%Blxٳg믿^12e /9z{% ~fsA!I܄Fwӎl6Ki LTBUU4X UUu ۲FAyD#4Uö,BF0H4lckM~!H> @﨣4,B'|2?(=DӧӳgO_s+W2n8J%#AN8d51֯_O0D4zEQ(%jmftM'c&@;P(HFZ[<Xq])/Էk<5Cb{%e!^YbxU!{r]R83`޼y̟?;h4Z6[PGcС[O,ӤX,J>' q#pP0e[tލ_~I6P(( lӲD"y,&Jipd}]*Fd̒UrgQ!|&e!^ٛy*w ߉($beHby~ ~q=Tv[%\kƶyoxW}9o 2:ޭA#hjnu\lB4L$B,L&躎m%Hc|!OMM L&k15?Z B?,+rBb'Btpb n=P7 .\s5^%`{o.ϟ?'Xӧ3f H#*okqs9OѯWC8J%LqlY]\PH(%M0X ӲPT|>뺸G[cpFt]gyyp!B!\땽YC,DT[[ѣYp!_wriii{{,ʹ'Sik1,X (=,x]weƌ$I_sDe)c=1cTo2sL_3z9yDb1P2FԠ`%D.T*t<# c m,ӢX,kP\.Kuu5F.CQU\e獼g^p!B!\5Jj.e!Qwcٲe\ݻeu+Ygŀկ~gYle{W\޽{3s]B/~8l0.X~}?d{U ((B>ñKh89\E4N,u=4U%L:&Nc8N*`۲Da澻wB_H.W( TD}}=W^y%-bΜ9?EU+cƍ6m{7'ps}Tl[\) >cbY~ٳ' #*S<ߪs-iV}rYt)W]u+kbap(Lnݰ,. f\.]*EdCk}z-~!fNmgc9+irG^b{*?яҥK8q"={,wY<^|EN=TɓYvmH%4,Ye]M=z@}kС[sOn<8|g3׿`ۗX|e躆]*Q,$qĿWy4D|T:aH$蚎8u2-,"JPdO &<`}>"4fen.Wr;ABlvqG&NȒ%Kx9#+i[ɓ'^{qiK/~h|Ze˖q饗}֭ yHl;rUUU[: w}7Hd+TU~---\z饾~5ܟD8i |df0%jED"iijkkPUueX,Nss3`dM znmقoN ?!e"e!Y'+pGb4:Ç駟fŌ7" ]o<v7D/Z+Wr%f}ͩ^z#*vi[L>}4;|>e]ƺu|PӎL(D0 4M'ҌzA¡D"a\,bFrE bqrBm IXmu`#0h3D!&e!YY;aZsKbA!ݻ7]w}?0Æ +wIgq7ҿ>l^yf[f _|1鴿FTUU;k:=\>luu53f̠~+WUm3a/_kvaUE R 4D(JMM lL6C*P(JI$XEkkxн;BB>Amu t\(~(.gպ?/"⻑߬fÐB0 ~.rJӟ8cs. ?h xd2iՋqF MݫkjpLVa ֮[K*[]( %D*TP,zg<U/Z<T*UfL3rH{G" kڂ cǎ3J8Hp'CqTO׌>;!hlj\JCXHX,ry,bUU) DuFhR()8yb&MMz@c[7Mg3@!xGHTZ-9Ov. 
!:zx̙3ٳg{Ͷ|rƏu] 'y;US7ndȑ|嗾B!n{o_sc 0cǒNQP(]?NkF'` x.F 8Ca㔨P0H6u@u xM:ZZ[QUhqlD(DUTҙ Ht/ô}=/(2B 'B|O.狕\ZHsY%0/xy9F.k }Q?p ƒ>H6-wY]JW[ȑ#Yv9apw#_Dx+W2mڴ=/o߾?e˖˙LK.UV*'OfȐ!UiFApcמ0M}/r,J!_2M`c6L&<\.G"DQU0X]H$D1r,N.u=l&˱nt>7~>s!B!4ve]QQk1k.e!ĶL&袋X`/r `emt:}ǁȏc|Ibc-F.c,_UU馛8|,qtуK$MUUASUA`0H@y*܄hJ&4M֮]]jbL&C8!:A<L:M[[e~K{}iCBl9i. \WZ k142(Aϊ+[ӧOꐷz .sWNkO.]"D0hJ.#P[SiY!,xoobm:'RoB|w\ە P*c5FGY!(/EQ:t(oY|97p{.wYꫯrg3`nF>r%m?^{_;!~'|׌;uþi]ESS3 (_ P,CT,Fu4]'_ u˲M e?IS75*`9ق7/'vBl]\ە\!u@?޽;W\q-O_i.k566rm>pRIW۶0a ,=+ओN=Gjܹs=f^ Ca[&in:/=*O1èF.'-ՑHXM=O4n8biaYhyuM)?/K࿀!B!>i. ֗B]6:x]BDUU8{1.]ʤIq]f<^zN;2yd֮][ʢbJ%&M믿kѣ9}z뭷}ͨKF9нH"J2<8 P(\M:55YEH6!˓H&P}yӿR)TUh<3Clu$O{c3?CBCB|)#@BέgϞ\uU,^'x:jZ֭[ɓk8Syq=q뮻y5b>ls誖-[Ƅ |]9Aģa߾CUT AdcY|RD8!~I ˲hmm;p#h ,X%8x!߮*Z i. !*'?ᩧbŌ?ݻ.sN`}aڴilذenkN.u]nF^x߳;<.s֮]˘1c( e ܟDBMdUl6Kdt]' I8Elb1D.npp8uͼ*߮o+gBi. :r4@?i. !*M޽kYl<vXK?_W 0:W^yw]Mg_.7|3sq5"' Z[[Kiii-CSN>Ă*\nݺNq|}E2K88K}}=PUU?{5`t]dpJ%ڧd2ضMK17!߮uRbȁ~BQ g>`̘1֖V*̱~wMsss괶vS1cHcY* 3?ܷ 8=cw\E44M#Lf Bt ۶4h$B[[+B|!O2 N =z R,qUU޽h4SrhlI.zq.0B!*4ʺP)k1@?!vnv׿5˗/C-wIfϛoY+%z qwSOu3~xi, AR>לOkDQLt_hAUTҩ4Mr9lۢR騊8%D"BcS#YDQ z灢ֆ RKr:SsGaƌ :oM|zW2e=3`,0 jkj P%ZۨqL}eFuUmS,K.8CP@Qq\Es]r,x.ژ1_BIsYeB2!Y!Y4saꫜuYrr=C aذa<#r-Ys]wmj!”)Shb}L4 u}HD8d/RmĢQR)666bE}j9HL&mBmmj4- "Lh4eY} PUn>ѭ.z@%[gx3gӦM=`ԩ{{=~ϳ{$J Z XmKE{xI R*Eh+]"Q墂 @̾>c%3{&|_k֒aV< wQT%WFQ4]cr4I\vM" #* ^EQh4=qcIeJ2#I4QU۲r$N ρo_!%[s%\6b$I|!{jx;w[,v[<^:P4T6o}ly\ys7B۷ogٲet@TÙ_+ Yvz>0unidyNlݶ84&烀)~ZnaiIʗvGo"!. 5C^H<|pn.+4㪫¶g ,_-[6CQ☃kQTC70L}-Qci$h`lQ<'c C'M*q40|۲ȳ/څ>p07^}B1e. k.ϕ= [4D!###[oMoz?)2LV+WrGrI'}Џaby\|Ņ?g>CT*t{( ذaCs^w~4 *ί#&y*z(V+Oeʒ/5" C4%s JYNZ%I:i ^˲1 oz6(}"!$\b<;sރC6S;k'|K.oǚ{:e|#g?Y3QqwsEҥKYr%J9BͲ,#W.t1/ۗC,,:.IQz.j (9amv*A( IP':Fu<<GQɿl6pz=$k\3t"!=$\b us- mH-B̌Q;<~anfN>d4Mv۹+99SַE$>ִ{ IbF8>R #o/tQi~eZضzz=,dͨJ$L&Y (JAFxIB!#4aգ#OzG\ජ!=$\b 5\.icb!SU׾\].ŋX-s.ַr!}{ng̔K,᳟,F9BznBg{cw5ٟgieyS.mrLV#"ʥi4m㨁EeOv*0 Y06FGXi̛7/QS+ b֑pY]#˻e!Eq裏r7r's͛+x_g>-ۿ3UCq'rW322R!vwBg,jV8^΢ 1- 0t˲(hF稊JݦvV+_iqeY](s&lٲ )9%*^0rxOZu}EB1;I,Įj qRjmJB i_*={X`۲,nN[l.?ü=! 
>>9FGG #/.tzwrd)%4U%" 2$^.)Fv֭[4 cr9DD4ugn)m iw{\B_Bi'f+ sa{2dsY!f}ݗ~]뮻N8aG\ve|mon?Wt-?βe}9 ,sܜ ,H0L ^GETet]R8Sif<DzlTUe-Qvl #O"!$\bda`.˚iS"BQ08S[Xf +V`޼y>nK[n7y|ӟfǎ{x'9s6nvG 90;>V"b($\b=9sQm/K,3%/y ^z)O>$_9c})yЇ>Arأ}.?Ss9zBgpW:G]aٲe6CUģYd*0 H˶4MYa:a`c$ ikpBq8m[21¶m<翟ƚ7vGo+rBOe!vP幰 S]Z !Yeqioxs9z>c(馛xQGW_kRD3wn;_V׹YdIsAyƍ>WU0 IӔrQUe2;Itg_LLL9R,jac|ISTUEtwOzG]EB1I,Įpy6%\B9+X~=\s Gu԰4%֭ /dҥ*OwwOj5jBKӔ|<##J۽n4 u $IP>:zV(l߾(h6s=aAaLZlٺrĺ>ZP!sB: weLmsYj1b3;dժU}T*akA 7k_Z=X_uZv*ˬ\K:G]|{)tQ.,Ku0<ɲ EQ۲(M-qmض=YѨ4 q:b}tMQ"A|t7"![K@O-6UN{5~}o߸ozB1;M7Ŀۿf͚ag,=U (J\;9B| _k)tAWNNN僮dU2M6:8I =ò,(ͪAHIӡZ$ i86~Cw }X>`~Z'bΓG]m\Z-FeCBrs=}ݼoqakMW ,۶UW]%[n)sޏ~#>я:cQ #,s]4]Ƕmv؁KLZqL^'c$%z> iR.zDayضMhFߧlw]&p˪lZh\s`kCBM. {ۻDK)>һ,sEVfժUqyX5cyaO~r>(lcqQ/ۼO_Ed*T0 }UU <˰L0h6DQDq- EUə],TE\.y)I9iGrй<$EB1wI,ybB59@e!kE裏ka\q/$M?131=쳼=:_eg(l]j*aOv"y͑ m.I:V|fAK%t]vtjrh4yNTZ IDATRV 7vI!B!. =\l.K-B]fs9իWs뭷ria_HTU/Q9st:Tz46r#"qL 2<ץQm6ϟO&Al߾]׉0u`Ya;v0 MҌu7>R'} bCBm. {$\)>. !@Q;8/Or饗dɒakRU.׼5>sy,_MP7&DQD,n;zj*aYq16:S*tzؖs{9b\Le,ˢZ'1i.m+?x4 'CEBwpYݳCџ% !4::ʊ+Xf r r X(|OQ8}/֭+tΩ~ R ˲(9a6ciJN)Am( d<<4M^1>1Au\%c(0 TE :.) _aзXVg5q!C''gsasYBTUNcڵ|d>}ओN1,.+t G%#a6^?p]vC$^0 1tzDk .( thZe\%cPͪNVf fyͧ"!{H,pC!oh"裏կ~׿(2c͸}z>{+Wo/o^JLDQD2M2###IŽ;vaH$i:YsQ!c,#KaZyga`&硩*b:>s;E^!B!.. $\-TUEG%\BM8yǹ Ypᰏ5#?|N?aC/}K4xa0o|Q (Y: ,2ANt: $c<ץV i5y~k_!H,^ywLVcH,/L/梋.߫;sy[:cW;ԧ>U2o;hnPUEEjA0h()WؖMxǼy]2ijIZ븞7xPUuyhϹ 'w"b#ohrQx1R-qq/G%^cayG4>( RT, UUIIӔjA@y(T+4]#24M\*19.ߺ?~\-b%@2s2)>\B /~Kx'k9}=77}>{z?8.ndqhyFEt,MɳS"FFFzBxmJN ˶1 QjQai( vɳ iqx;$ EBwpY3?eB1M,Mozz+W~7Fc-W9|PuV/_N/lih?y%ZѱQJ3a' dx' Qfm{m*j۶T~ώ;V1vj&m6I97QН{B!f- dž5x.l.6ΗB_tR.r֭[5\G=#N~:+V`YivYl۶m+l(i8y1cS.1MA4l"rl!8&I{{+Vs`~C7H˲PV*ujz=$@U("I0$qLǨJY۶n`:CQ' 7Q:Q=B6B!π6И=7鑻E6B6ӽ\VYbrB=611DqYzp0, Lp$N{yFF(u۶ MJfI4Y 0L;Vcݓ_?-rB!S3^>aB?:TE5 !B!0gɲeˤCiy+V+t`ϯ*^c[ yZRD8NH4M \ I4M#20dddUUv(BCt,#" C0L0䱟{g=D!Z !Pe˜Z5l. 
!=P FI]9~~l1<#ISTE!3tgyh(NfǎFG8Ck1>]1 $ Ke4=&i& [:߼B \gCBpY=5;7,cjpY!D|O1uyя~UV:կXm۔*2,ωrL g]Tm[& ey'MS$&2 |$e8yNi{qd~=B!/pY=3޹ `OqsYj1BAe!fo~8 91e}ŲL 0, IFNEWrhNdYFFDٖ뺔e0߿sjvwO!ČpY=fzhDq2cw9GdsY!l' 1=n&/:c}sk_@c&q**iy.4 ,"# `| `XIu0n1A`6Q>z,(J[-z#tȫ> "B* 3)0ٽg'1mc&#&cYY A9i9Q67nVQVPU4]˕ i1%! CL¶,O4-ہ?dKC!ČpY='˿Tt:x!bz{f͚5\tEЧYqxa:Az:~0iw]v qHӌzNZ& C lPV1MMYp!qE1 FeABz?Ϸ~(OlZ=8x!B!o"{Nzn.Tc!$\b~i?|epN⠠@%!U*e*z0ȲN躁iz= @40$KS,#2P}n@>!b$\bz]ovbX{P!B!b1Yq:C_E Lˤ92iQ4^BV\.QT*Q.]EXlټUQټy3G1QaYQ99`B,ˢOǁ73ON !BLU3=0M3P,bI-3/s*Bw؋ɲ۲ (ӄQk Zi}`=6aض댍* m11u]<#4%TU{ײyWUt"B cl]C6Bf. k:nBgda7AYN>n۶ |U(ʨFZZFS.zETrH4M)J=$ f4H4PUMרVhcܵ槬{n{WEB!vBL @M:b?rGsY!l"B̬nOӅxѼIL WM؎ChJg$IB?,4IH,ɲ EU4vȼ&q ˶|@OMry+"!S!#gQ?EQ0K,bI-3K.)tz7(J8jqHپm;rnKI>\γNKxG$8CT"#("MSZu,X4.lizW梇!S!gƫ1z{~. !$\7[n\pI6b5GP)(cx;HbY`%)BJS/EB0gjl\o.˃~B!Bns2>>^ UUx " #4AUl&MSl }ǡ՚ (EVnw(Bg`:t QUr~~vW p!b'VenKk. !ͤCWwy`Ln6w{P_ &&&{XI"30"N6n|2>>yN̟7F[O7OyU8yCB"k]Xһ,bzMg,B yW\BCAEE1Qa{>c( nk*#I:6qEI`ۃmFT*UTMql2M*qL2A]e犼jɠnO!$\bz~ۻlNA?j !B | _o.t~o׽@'di6D|,"3**SIӌE, oߎiԪu&qwdyFTj>QG1)99QaY&6mFUU==B |!B!tpY0gT\B1Cu7/Y4^}YaZiPȳAUEYt0z p=$Mty#Uz=,qXI eiF{/#+:A!(BL躳wsyB!b1}.BgKMӰmZ>HbjIb6^UQqm;dYFhHs8'Iznri9(BVCU54ڪ$ih`kY~ &bΙz#MΚa]wn.S|$\B!}QeŅ[_sjEQHӔXIץh`&a>Si*nceaFFF0h;躎f|kpkICBoF7{ysyj1sY!tZ !ƍYba66uèm$=c4 I]q]zxGRe$IayyNTT*h"(A@ E2M <,-~?2/rBQ4 ~j&j SBpYиٴiSa3N> 5LZF^',"3 ӠhpAŅ*T:Y$ |\*(*y퐦)nrt۶u=S<2 *1B9Me!1r]fڔB!b15q: /Yؤ1aA 99 DQDVz IDATFt]LӢR2>>k(B8n HJLDQm٬fxp]w3xO!A?!1 E2 (+B$B,˸K sq'7oqq4u,B4<'"Y>ip"6 Pɲ ˲M4,˨VKEdy<Ӎ׽OPϢ B! e!z>4G,cj?Òe!bʕ|;)tƑ/] ؟jiTU:ib6aP.H4K0 4MdYFGXn` #I,k_CU4}i 'I" "!3Me!԰,ػ,B!b{//}/Ǽ8h4FxmYT*4mPwQpJ%rr4î ye4 A0;s!sl. 
mذ'8o#PE `i02q* <#MR(fNT*U$AQt]C4l˦lkIl4{IŔ~<̖ʠ#EB!Ie!?awyl/ !dsY썶lqL]WXJ$TkU'&uF[m @7t\%MR* A9 6]2+ T*e8Aulunvk=lۡR*XϹ uһ=D!& (35ݛnS BN.Meٲel߾pB0 Z(,\^eCiwT*؎y5J&)۶m' Bz>qkNQUuc~iyOέ?yN$pMCBaz#3ֻ<[k1)bB!bay3Qi1A8011c;ضMR|AQU4%MS\Eu8\.q:I֓d8paBB6(`b&퍵VkO"l:e\{,˸袋x <^AIfj4M]Si4((ahF7xlZQՈ$I0MfIxG8`s̶9iw}¨> (4B!f (V |o&aLTR!B1y??9ZWxAi9i211AM~(ae[TkU4M#˲ɾ`CgQH'xmZ躎aXeYt:0TMƻf{_u7sC!(Boƪ1feH-BZ !_^{-7tS3w7{8a`[aE1r4MqJ%,#MSz'qLGiDQDqDTZ<8iRU'S0 ^yN٤R._<^u["B6. Q{o6.˃~B!f|䳟l3cwqvU?]{" IE" *""e" .*2"d 3000(&[PBHB^ZqHrowys uOEQjF$1a`lL6KR#UElo aZk 0@4J%0q 8q"S) Koš4/7O3B1IrYҝ2TGM%BHb4{ǹ S13i JCwLL&jX @DqD.0tTUAA!$aƍض=K4PPesKZ**J(90c.74atJ3B1IrYC5:.Q f I. !;$,F~N;TOiʗ>b2<:lh84 8f`iضMZV**cZ i* aP۲u8q]\.Kwwm3;,ᇩ]/5`| !HB ^`aA$\ofYwYB!ZEb8̙3_=8_:`O޷M74ضM@؁hEef1 00 $6,qP5 y0 8"r\ RLT?}/FލB1$,j.on\B*\[0 9sY4ݾ}la!mJzyʕ ZjiJ_N!L<:**i4 \>G\s=tMV8 Y`[6XWy%4!$>w>a6ɖT.K2@!H#m1h$ ^x! ,H5^SûM1M ]SQTdYLàP(p]L zyj*v&"EZnz<}tMGQƍG$(mYضE<«<4/7>f!b4QK;Ȉ\́~:tF!CVW\yRmiXM6 ⍵oPӲHL&nTUAQDUUŎ"qS)Wx01]) bƍԝ:a4Ae{jy '͒1}G6Z~0hP*!$N0ml&#(BTDqD$+sOũ~BJB .QDI&17s4. ![e\#… 9S}ҙpGw8t(B6<*2~сQT*rYqffd2k(J_`gбm~JeJ*?S^qx N3B1ZIrYH\K;&܁~ e!#$Hj*o~j1+J%4M\\&>02 Za۶QU޾>ab6SPU$7nJUQT*a& peX !+bJմCl{ bHrY!D+H[ 1mذN;j5{@C{0y|7qkAia6AQU Ӳ4Qer9:IdljDQDF ( NA._2Ozi|ؘf!b-'e! 
e1VUUN?t6lؐZ EQ{0#Ka u}#ˑfmMUQ58=Z?pMT*U8a\OQ>aj*6ضM_v@,O3B1l~9bsͧٳ-;}\~B!i!F 9묳Xreqg:& q M- 4\*Cg'$1%$A EQU̵:Q8u0E!_Xe[DQ3hZ}T:Ǵ!c$~}4|4U-J궒%BT.&c3Ϥ=wdvP4Tizm4PήNE%<4(<(lK\BH4?h8$qB&u]EAuTM2-<ϣ8_cz?קD!+FFIݤ\+ט43kJom\[ u7oͣq,ߟO~̜9UnVBlV&rY[$\zw_m>qᎢ.y`?( Ύ"?:u2vuasB!6@ ˁ 0V.K8#O忶rJo;s?Lblb?>]vY1&9pr8cXEɠk:mp$IBgg'{{ZFmeZqL6Vw0-zFXRRwsn"~^ABHBGB5Fj#-llIxY=boP?I./o4\uUnwq,\B1,[>(RQZudm٪b0iA&!ɒf0tjB.V{͢I$I0 (E!c4Mϣ* ap,\0$ \_eɪ]/B!b,Zχ8N:i-6-FTbYt)/ZV~̞=O׌oA^b4{93pLC@WbP**ip]0T*QUq ju])tuu1?ixGqؖiIj}8A4q] I϶/ΣϽ:x4 B!X&e!'~fw46-FF;R<{Gˍ7ވoM!*i!FN?T{kQIO!C.i MRAUU<ߧEi,s$qץ^]׈+( Xv8 #|Cu:;H0.==AmdY8T.tCqIL =v{ pB!B\Ji->*7sH[t]ޯZ.]wݕO>9D#I$'?GI5ξ;oeYhNyƏC4 ]4MtMGQT0$B* i|}EQ0 bjmFR*P\6K$N&(i^r4 !$,D BdKbHKTU%ɴ%vtMq<2L e+;=|r0 8,L&KXEu(QU8dp_mFåA$DQp]L"Bl;ihA1Y!{!8P[!IrY*.[9rY=lM=PfϞmFzY)b۹S݄o70u]2MFhq*Q:c[jFj*j 4Y~a 0 ! #`":ASUF-ZWץy9`UAB$hҴ+ bȑG- yg9c=;w.{KBBb4xOj9>tHb(j:QGVEU #r,AR׉L&hP() AayTx4 Pf4OљؖEFy`Ӳt "Bl$,ȐZrH\ނ~\cG-W_}>ӧ7{KBXCaŜwyqZ|dg: 4 $!Ib|ߧX@Ӛ DUա$s.!p]6l؀`6CWwG m$14e~Ԯwy-iB!V\bd;{Kմ$[2Я;Lm.bvyg̙ʕ+۽%!ZB*H+0gΨa(J$A'l؋iÖ*eQ-20m{Ym!q ƈJ%zzzP5 Π*a$1lXU_2knK;B!7I. 1EJG֏~\~+V⬳b\p[.B,=X?s7[zGo&og?< =1bH[ 1K/M5Ƥ"|pG(" áA`QفdYlFuTUömrIQh4,0 }f2YjzQU1-4q]UU!?8W?O3B!ޚ$YTH[:oS p%;s)s [l!ZIUU>`~q~vT$z!;0>яr7<7-FkP$Sѕ{(P׉FE5|9#0$Χii nheYG$šL>4]GSUJv&(xGivFn0NF-B! 
!E(bש۶zM_z]ƌ%Kp5O0~xL"1*M<y䑄a /@ڰas7x#I0c LlVh޼y,^%kuQŖ5$I²e˸;+o~C=yL4 ˲ڽͶXb~:#g|i$}lh:\c4Ya* ,08CR8)8ktvvO&A7 ,˦^j*ᖇ_MUsCB!D+B<ٗQ2V)W'-MiƑGmގmƍ\}\uUU,׿)6l툭7 %ku]L<%k*X7 #838czopۛZ S]yqDQi hyefR4M0$e_!]wFiӦͶE͕o8餓x{Ê+(J<'xsꫯ|qƵ{[b+0o<,YҒ/S(ZXpmoqs=ǂ {キ?r)ڵkS _&:iTljId&qI!iZTU8zபb8LQ۲1-bYq' Z 2 ?<_Mf!B3,tKkseƍO~Œ383YbE$fftI,\`=hQp |C_"> _$ W^y%]t&{Xt)rPXy7yWR}wa $q2/E!lvrm[C sBZad2jfzN\ϳ\.G.ki4Q8r"iCfű9("1QQ*f6MBCkP?~S7TO{-iB!Ħ#Wfwy {aܹ|+_'So֣P(pgxbΝˌ3ڽw.կc=8cy۽%˞ox]r&裏(;rݱ-0 ZN6U $ ƍOEWw|Jidl4MjfrIiDQHgG` m%lLdZ4 !b#[[c7pn%IwAāwAJ}L裏f~̞=[zGIpwrp8۽-1J8gy&=X*k:Vy=8⣻؄QeZ8Ä 5t]' (J yudT*U u=tݠP%41 f EQ/Ԯ^K3B!6$N+:.zd~SO=׾5v}w jZ$&SO|s=<| _<<=cy\x-NbljiSO=Z0p^3<uSa:;bqp.jZJ^Gu"4Mö3DQi -u]\%c$F5 `ݺX4+u#`O<+צv@ B!2= us{ZkZ6ZbիW|vډ?^1:\{,ZO>L&-˗sꩧ뮻r饗BRo|,^8ƍ4O[?vZuMO|9TMCQT쌍e7L\6e}߇{躆a4;[TraDǘyAPl&]\.m$ ,[})k4MABe$,ol_r n je]Ʈ '>- Y~{~ /p3~vo[L6;O!m16Moo/'t˖-K5A4*NKq83X6]EQ83yOw4)I(hG$XMniщa4'٠{qB8NujDQjXmzN.(!w-XJ/ˀM5B!Zbt u~-چ~ݘ^eHEz+?|0ϗbTs痿%SNmQ^/g̙x,Y[cXon:N<%c<ϣg\SA˲|UH`9$Ʋֺ̡n8P+!M( C'c|4\:iӦqnݺvoIM*z(ϧ?Qd{>~ [yVZʼn'8,?Nʹ瞛z4\~̟??o"m׃eۨJ',BQoTUaܸqd26ea[]݄aiFXכ.8V IҬ< ai(X74i^kĞB!%e!F{J+|}}K^j}ӃfM xGx{zZ~~.7 {voIͲ>pM7SOqqaY#Aŋ׿nW\Ѭ[V&GCM|rN:$z{{S5qD.lN=Vr-\wug,I:v/+(IJ0(T*$ Pp]Um&k:`&qf((bϠT*ͤrd2Y{E^Iw tfb9B!H !ޕ֪;l9I}yjuw|~G@:64x(tR\VT*w}WZ2}t|%Rvw /d~dٖ5,^SO=J%n-W]u'ON=V\pئgFDqD yuHH=MӨ$@Ow %?@Q}[45k'|2; ׯoVE,ZE1ydv[9I0ƴ2կ~uL$qoy|rJN=TlS]}˲sF3᫩$hz a00PP1cY6jj󋢈8QU u 40Mh&+ ib:eQxyFn98~?N3B!%e!F ٲS&j+XG}|{QfLMkTЙ:i l,ը=GGaܹoMUUf̘G>6l/m_|n￟.Nʈ@ۃ>93,.we.r:;;SjׯOf` ~Æq{Е EQd3̈́bZ&a^PQ5u(D NF>Իt]jV׉XW ȴM!bwKB.6TS]W[;m~=v_o4M%l}֔ ·qv}.^{-f#n@*!(̞=;:jT}y'W¬YkRXCo[=\0S={'W\qb1XVT8ٰaCj1TE3Lc]ض*y@ؖet;0P)RDEsyUU EQ4 sdlt@u1MLЎQR}B!ŤrYe p,в^trZ8qֽ)v|'schPeUQLV|?_Ӵ%>4m;f^8am_x$l_z%nF~ߒi4.F &|c9UUYt){[ok``{w]͌3F}og-Yk_ݲ.U; .KGk}̙ҥKSsә2C ˲,szhe[KetCǩ À(MIf{0lV%[iضET$I41ME|>OJ友X7PKW+B!İ̄KB3V-h4UռZ|2YG~b:&5Uzi*qoU#Ib&CQUL2r6^94Y[G~{ÐiӦzE<@NcGޛn/xXbp\|X5,Z)c㏧#;oϞ;LF,ִ=OA'lb[ӠV&N$I6uq]9`A lS% ܳyVџeWUiB!jV-Voxzojz|vUmN \NG9fLPuhMEؖNƸ<JUjmjʃ>ܹsٰa;찃$ĨcY'|2SLaժU{[o+ 
C~iΝ˲e~fmڽ-.vmX%kk.lXb}SG?P[$I.~ߤCӷ{MymU 81MMӆ.{O6EQ*=2UQfˊu!cgPU0 <  V8x.^ɓ+֤yp B!^\bY}+ m'tQȶMO.ed-CޅwL,ͣ7Uͪ]׈dhߛU` $I58+7u&vvQGv%%@ϛ,xM4{.Q<؃9FQTUAx͏E=$~q騊Į{x{~F_yK˗ss}Q,aڈ1(ԩS9c2+VիWs-p]wf6mڨMe\ $I 70,>h=Qg S؃tLDUUa(*:qSdl?h&IPT˶L USˆ$mMUThF>\z ?BZF_GO{π"Ba%|Vs7uc1koh2A42];3n ӧcfw%Ħfm8җD,]tZoe޼y\DQČ3FErkq뭷/dѐ\㘟'zN3 ,K5:3> *ߞ5daF=w,,mS.,(+ AQttEEQ,]שjr97*?{*U!3  Wl鶅+ -m}mGݷnP( ȤH!H cg>BjGy*u*r67Wg=w`+h4f?Cb3sD?]\+}hRpŹ2! L0Wɉ@ˆ@f*L'YaH)cБ Rd^*uwH5KzzzXb]]]̛7֑nKyWsgpEQ*XnZmz[y߽߽QPˣ=}ofpet1+,_˗ٽ3EP*VH!p|>aHZ=**B!?u] <~`[6Z֖DL6mmV+E(wytjΚXY4FIhqYbŜ߽́8T3ЙxD5L!$xz?OHI!!0$ C0sEd6 1S'rs>y<]}U atV˓O>Igg'6l`L2ehbOOXI ԩ~ʥ2jzDh!o=7*jNZh4͠F3v \ |ϚE] 3~;7n.Q7TW?y-0ۿ[>Og^++Xl۷oϬiکT*H$b`\["V>RJ~mH)A&)L0s6APaR.,4immqbRJƍk4-zy^z}gf >j4FُF3vICXq?r7Y֡h;X#hT~rs9ЧnS78CEiH82CƢa(Lc0"$}~4E+8j|Y=Ahg˖-?o4M.\OhB,X\'ݻL8̒W^yn_W1o޼1;lj+UVe^K->keEV/}u>n!͛!D*:Xʉ*rZM?K'B'_(.m&!9ۦSJ%|l;G__/B0 ) xϯ|߽<4>q,h4FhqYl.Zy&o~9!SKy+?}-|QjM0 ('qB(q9"9k9Jj9FBH50@ c8^Qe  ț&͙)G̣=oЯKtuuq=3|H3apA|q֯_O0ʇon߾;3n )%-x2f{{{5k2e&O駟y}p)-?rp=tvvyff̘a5c &| .۶Yn頪Aý 7@RaJnk'?ɐT/Qs1.g^˶m_??˼VVDQ??dIq1RׇxX页SJL8W=ϣ)ZZNP˶cQ.TTeY8($"^ݾ?+v4Fь.ь}l#CPa)w>a~cW7'N< 7qHDe#uoQ,b-h0 k'I )zԅ~DH"Uis8tT*u7@Ϛ xx'4i3gٰ1E\OfҥL8_|W <[neܹtttt[C)._xᅣB\޹s'^zi}cͼVtvvrmeZc|C(MjmQ($.NW,|!O__AvٶE;Ƶ{>8eJq}6<}ʥ2aBp}?C jGhCh4fF3yCl[&S'c0W=O}b584"8"P#2ǰaJmd g)a!LAat{a"OJA DpNq%]8cL'&Gy̝͛wI>g*+R#ض1ҥK7o6mbǎ#WfŊrFJ,J;YGo%ӝTF&Fh4!Eا,u3m7lakh&+sŹb꟢8}y܈Hm$"!P0R͑Zh )AvMBv"%[L{x E59mmW7hHصkw}77tZ R,G-fRxb."?xo͛GwdÆ r-<ttt0g`?W^F:c˖-,[7x#ZƍkeѢEʒ7|4br{??RPSqF4\.Bݍi!rj5QQT("C p'g)SѢDJ#7/6NaE M#9CJk>Re/'!{0LEf1 L׍-@J a2M)uxvTٳ_WtM8âE(@[jHׯ<ݙך2e ̘1#ZYi&.2vcr~ -B!bUr P ?6|^]6xR.y>e~s^ڂa{T+iBebJz~hFzfǍtE4Fьn4`H8d4nߴm[y||zⷸ(DB {?̓&Waz2E(0ĔfL6V"֍-"14b0x!c:br5[ "e>~,]0/غg`ƍzW\.3oZ{q,;&7oR3tָa5!}C oq4CkΠ^ᠯ|Nvy7nH3irᇳdm6^{nmuq73i$fΜjb%.?#|_q?>]wuҗ /i#d]XMoOejzݡT,sٲLr9Q>#DmcS~m[T*UZ[[!BRH^T*!=:?sY_XeFh4c-.k4%XRs8xxJ{ye?;1PH\̉D8"6qEsKnq"%\Pĉ619aH)p=_m4{9ses6sV[ڀR*4MfL#3 TjE4;*׮]˴ij{ 0;w.g9fÆ #;yf~__J%ϟjbn66o<$k |}mX;00 ;~L z5+\۲|?` %,VC;]0 mP182\B@ZM?G%qR {>=l/F?| 
n:Fh41>h/6_Įx>NOZZ.ɇ߿)V-6Da%q'̉ț\$QRy2"u(WtFj jҺACnLNgd='Ai5M) ="Ӂ 04M09li:asy܃p\SQ|E/7|3wmmm̝;IhF?x '9}u[Wٱc/я~DE,Z\.7m(+W2qy8b1~_o| >裏ꫯ\.g^+k_̴!3&Ea&~PU.# RRZ[[ JJ>0 5ODaH>\|.amR]O0=˶0 6zّ@oE4Fь-_ b]}U0dk;NK1W d!frBc_cRu} a8H@f#D|8hb;$Ħ%$6#TRa`ẙH عe#:Nh) Bx2@?7>$559"0BTfHxl&~zvvȌV.B.Rj5n!-Ʋ,>O|r.\8 +sod'x"}j~7z*eYR/Ko6tp'B,ˢZR(k[T(xnL˂H@R`[P,0_gZQ-TkU,@כI_oiRTytf_BNh4F3&efc+8o9|9qFo`p3nkRhK9AFCuVoHaH( C(7.؜828hD,X 0BJќRcYGuK.eѢElٲ7xcz[0g䠃zO [yWdK.d}(nkuGꪫ2ɇUV77霅,Z/X q,bPz IDATJ|8* s9 H)(DQDζ1- ۲uR>r۲(W_}5sHy\qs9>C֓O>9UE\s5tMCO~}kſ6l`ɒ%Tj 9.:8:.b!_48 \*؋CGG;v<ףX*iR)g0 9 VkDQm۸G[[+}jwRضnga7oeY@h4FAFP`^~),q @,ֆq$/l7;c8&c1ҵA$bvW"Tc|'? 76~f̘?c=Аjժ!f06?Odw3 _җ m۸ ٽ{wf5lS,bԉS( T꽸^>b q! 6 #}X{ı{?Yb:L)t =%nf`@ai] xpK̆1%KK}˶\7 <* mXu àtW \>x7 =-p&dYDh4ghv |2D>g7%lf1(s*,c H6 iǮL/oΔ`Fh4#thEY)G#`菸}[NvIH- fE۩1 s3,DD)dC5%Dޒ~|G+XU50$HDT{,K q?Iɰ( cqQ">!D7%/n~{Zꍯz*˗/SN/D{ u~W_ڵkG}0 9 >nO#<ȐSO u׾6d1WU;a5Aկ~5Esas0M>lFeYBVösQ/zFB5 Cmmj8zz{hG__?Ba DwQȝe۳<]q@S5FhFޡX ! Iؖg+9Uj$ B4 DdN ws\a|L6',@ is`??@r0Fe Α͊+H)%Am4 =Lؕ,PaA@X5้@H e`b{ _0cvve=hHشi+W䮻"35ьC9/c=۷yny饗馛o={^{mHZlٟzΕW^c=6$aww{(L5o>P :$k=* ͳ>;$}B*ޖt./]]?/~Hzx;,Ї>y;;N5Mh',4%9ۦZ3|<9 Ll!r.ظu'w=6\m Fh4#ڹѼ]XZp>|"8`(J#$@Ek43(!7q1'{ Dġf!#j0D;c';c0(w)%a"MrGi*r^a@Z/A4 c;Z=Gk:%St$9 T-<6n=|NM vV1RA>\]ar 9 R p]aC\RP,ٹto.@]rqzţ8^~\cjFh4Q>h{LΙ,yӕGfX@NH%$b24Il0 YxB"jE*n" 1*0?^aN$*4j"ɨ0RZC,D6vyT 83x'UVcmƥ^ʶmakŋg^kزe ^x!===Y&LnoŲ\֖zrKY ' #Pu^hS\nVR,Vj)RWr>' +V`Ν̛7qƍt[>a .SOkX͛Yr%w}7ryFo[z0Kq*˖-7twk f^kؽ{7˖-c׮]ՐB'ƬiSp]W JB'gR1e˲p۲ æ {SaQytP,m֖Vzz{i) =]]^f{2;v`'p*jGFh4͟5/QF)`|':cϠzlݝDQċ/7Okk+s(k4sYg}|>ڵk^~zXp!rysYutwdŊ|.<_2k֬ɴɇY剢qm8g(bb 1B{"T*XE0 ,uq$cY]AeBXK[xw&|uE4FѼ7F `pH7ضd\KR,0/ HMbs2!6f=52&C aAuޒ$"^NOlj3`=̦i>4S;͂ˉ,c1j*ٔj_Xfa;΀NfZ)MEӱL]=xvTlݺ\! ,WFP*?3-[ɓy饗2ͧ -[0gƏyݡ,Y޷zj.rqy" Co&:8dz/ rwP,F!Fa[ARq\a *" x'O0 FA%-ˢBkk޶Pfz׀Yh4FA{_St#oǮ~X~:|ƷQE (`~!DQ=jor8 4j:ES9wd08%[8k"6B82ʹVrvlQN7Z'7 ͥ|fO#0\==T}OO{/f޼yt[>aYG}4K.el޼۷t[oKY+VzjNʴi2wM7 `=!WZˇA>{l:;;8qb浆?O3q쩜t b:VڊSs,:.2_a{>f>ZUeZ? 
*JCa⺮z>"!$z*sn%.k4F Z\h޻(Q=ݬRwĺM!: S6DDXNA!Pb09ΒDia0@H00Xh%vc MNQ<+16@ PWeYintirIhҋmflNr-$ #,L3' Cq͂xs_#SiVmLʙ,It[O8O<L>)St[>!`ѢE\xᅜp ܹW^yezG6nC>ۙ3gΐ =_Wp/-\kk '+Wkʹ)uBG/·YAC IDATBr((( 1b9q.+Gr?1 m p" 8.iVEA-0d/.H$Nc8ñIDmxxm[~4"C(u",{˲R9_Jx6RQ(mێ 1%\’%K(̜9srwq,[3g+k׮nm}wtvv~zO*粔˲w3N;۷s饗Օk2%|%[!\Z޸ADD.g A>W ~Xl+.~*Qb|ZFEl$zKJX,zi }5cWGyYh4FnDF3j8X|h^;8E5`hxH GY@g!@#O97b?4%jm ,ZG=\E iY'bvs]P$㤇  x|:ԩѠRl˦Za[V8rʦLAaDZ[wO xuF?Ї)s5c({yG}O+Oߧ't>68ɚ}c|3d%Kd:lR~0O >Uwl\J˲RMXu]0x0 RHryJJT<<#"V"Mo/zyg /Yh4FQBh4~߁=-#;S [xr&LS2u8LS$yˑ_bN5 0IaHFsH9.h Ŝɉ)%cf*{ {b0,L-7ɉm: 4+:dq\":I,H eCf!R@9ƐRr9:ZKx-I 2MXr%wu|4ьa̞=0KY _&B;),XmEo7|suuԧ>׿NXv]˗~LuBfA^Tyꢧ0|! uVj՚z_3%|>}ߋ,”JrH)A7f7HI?B޾H]O^ΈeFh4ӪF1`>0sl5/SǷ). r2Ne.%MyA&­0 IJ8`7.h&ɱ#؎d c`6s,&|8@ TY8~{99B%4S9"q;7GaXC<;guL:Jh5vFixKrC o+8d8fydꮟOбsN~_r78 .P(t[>qO| ֭[~gG1v◿%r Ah"rqm3C%.>_ɑ=Ð0Aw>.WeFyxA‡Wmے1UUL:z:.>t6mXރ,>r \s O>$z*k׮]eϊ9^W󶷽^Ǯ] +%prǒb޽+ge]ƻNӶ-SN%(˒<zop8Da} f7S] UuKF1@׵oN0e-4-|D |3/@y!ˁ@7+P7 = _n\d !G|_>op 7o|R\vaq]ap#\6Hi86ckme<6m8vak"k'}w-4I:lMDYH_PU8h&x.#N:npVNݴ郋9h;k9cٲeQN.??e]͛&:HP%z+\s <r 7n_oo񶷽|?̇?c k`ofyN!s '1YR$N9Bʲ2p4ҀN{YQU%S)}63$IS]iw}&yOM @ ?@`53H?/gt3ٺy m/7&Ծa?O/0gUM)"?s:X):6,YaJJ*52AK)?-awmcEYVıͭ=qWp>GQo0xKv蠒,EA̖Z</ݶo~At|rW++~ T Bk/nmy衇xVz)?Wz _{&zg ϦsffgLx~mf~~YJuZQJ)T~@`}ڦ%ddY`8 V1-KKdijwBIڦ!IK$q?y/w$O\I@ @ 0I6 9 N:v39wf6a)F1,SXDb\u=.`u!xz]mӘ`yL%☪,?avnes3*t5ݟmKޫ gӈ[@A+ᴈ=g'Ie@ŊF, n}7_, #1!6o;NήrgM۶|?yA#_^Dַoo{E$X?m}Oo@SהUEe()) sm0Cl<7Nd&ʪ$d 9EQ)Mې١yQ$ E;fƀQ<%eoIWO @ ?d"`zEWt- {ή=H!شn! c6c-79)Z-(oN֯ /ރOp=MP0ob,P>'9nOEP>֑f,o5;eƸ&9$p \vJkQ(UkbtYluӀ=feNie0fL.r`@G41ueU`ȮN s, /=矇ISU~;\s vg+@Yn:.򖷐$ ;w-'cOW+cwɹX_8c׭\!R9eUѶ-EYB SS=!Q2 t:$IOUU9v.r\N}?o@ 8!\'5p'|p"eEWtJ|4,9v4,]F"2B xr!p|𵁯 H[.0yh[Rv!>mtAucn`kT.dv jmn$ol;߳sMHk=:}L1N]פQvF5mcnM24M S٧r'0,J|fv;m۲{n>p73;;i[ށjfjj.. 
6{nVzY,˸+y^K/E)vYnʆik^ݐ9ZWH)IJ !K%+1r|PrtSkn17}M+4iRV% Ory'vK@ G%!\+A +6c=ͼjwY7ankeYk9mccQ> i[ֵ\ͮy睷K=Kme)C'tzıB1nE$nkƨk4%c$aqq5UU1uS&&pBITMtiiYZ']Z'yw&z@ gCJ( ]yվ7-<_~~X;ۖnPJ.s "A`Jk{ŘC kvdp`Fb챜n ֺǃ(bBJiZj=}h#*ov9eGEmK!|ıQ|X5HIlBqFheTN/93S<YgKZk~w;cy[E[Jst2}bt:(km۰DeԵ2 d0H]i"z&&aD3-g!(7ؾI+g&y@ g˪ pO0m8HWtEaECO08v݌ ȄR2rF4 yXIiĶ{5`Aܶv}Jkӈ^3Q}Fр@9 |]ڷ+&Ð +ELc̶zi4fwN mFن4fF6!a,ɒ.8Y%,&rd(o\s5w}x{+@DQ'̛&^ײݻ hgڵ\uUl߾}2ڶ<[o( XZWC뚼Vղd"LMZ333CS=I0Ju2$ap1p$@ @ XnxaS+phim+s=;6r=Fsm`ܺsዴ;dIg~XZ3O5'޻]mI穴^MBx-1? z˲$Nbb$֗bj;Hɭ =t[cptzo9mKkQK8~^~ɜ}&gVi~po|uq)x 9cx^ǛfviG77nkSNYLO~=ƋN>95:z=:+Q$hچNCYVi!i5FT7vlz(E^s$ 4Mɲ4,+ZsC{'o_>Ƀ@ p9VrIcWrA[ؼaR" c׹pU3υ<87(-js686mS7~h{8VԶU2?#|S3 q}v.bw0;;ˮ]+i&ZlYk>0c|:~Yf >kfIJfLEAۥUDDrZ풩u -ynwRW(!^n= c>H OJj>`spJ.pfמ4(ؼa,~n@s)cn`؎>Dn?NűvBv.`הw0;o@(ˑ&_@i]ţH "RH4^g]k<^IzNh5>NI4شJ qj㇮Hӌp`.MƜuqgMxfa[K__=~۷VzYaɲ\~tI<<ѣ=kjWMǬ捯x1sCn2ҌCAQR6f`0$ct cqaѸEB@)E,.,RUuS3ݛbSpILT~@ p9Ph'?m+P <c7.qܺYL F &m[ay.ˬ =$G]n=N1!i"Je-kq/cuMFAz6&XnjD$L`ݘ@=~隦5ńnYBWڄƸv z$eAkXG@UDQD$mq\gZџI07W_͞={8SذaJ/+8,J)9󒗼'xG}t8ӸY~UoyNu]_VbtSt95Y6o)DZBHIS7Hסջ) 񿀷?]@ /B^< i *~?k[xtAn<~Nvf*m71RyڹAucauakczz, cum2 /]iƇݢJ*ꦶѹmL3Fj3DII^~;kb(bꦡ,+ 8ZFkM'HPI{?obf o IDAT_t gaSVk:n66nI'́UOElݺ7ͼUСC{+38^xM D1/ SB&vXbiOu5CQdnqL]7t{]}fgg)˒8vn'[6P׵W5I9H'(β<ω"*V C/[w{ԓki"۷/~|#( vAYee͚55K/sNթ +v+ȻjK(_]GAE)I CX1!v;Qgl֭C?/ǹi75CZpvַ6SaJ筛72cXym sب120x<VMFMv {iC5Nq0;@fvHE"ڑSF4!.VRpɛ `l'=[kx'ٺu+k׮]e򗿜/;{wC& ?ӣzg@Y?D󪗜'Cv^NYVLOOӶ Q$%I>uIst2,,Kz.KKKy,u n`0& 葊@HA$Zkk߽eUkz_yH47&zy"|,y^Q9Uִ-MT((2R4$I.KXR4M`HdYFS7Il!m58t: a#,r?>1@ )BfZ`7F`ۊ0'sww~cΰf,pun1pXƨa;Iͨ-60ml"7- [n$( b{ mCm/J ºZCDeYtzGQV|,X);0pQm1 SߐVR$EQҶp REUn}'?Oc_}>1nF9|=X!رco}[ ٿ?<׽u};iۖ+na9MH҄(QR-}J%iR0stii !ޔ [ReI4=;2 m`-eAyeve=~O?@ 4Gr> 3+ghۖG>WzSl\36e̺<5MpCHAS7D"""B)#c[s9l]!},J cS7IYtEh;{ XX.e,K͹ [ڶvUiIӔ(I3)K3XN☺@C`@j[2aяruMm^ϫjJᬵvZZ=iZ$&MS o7[EQnc6~ 4cw6 h$ӕf+L;]XѶ-I 'к"NbTUiIcWeEȱ_5-Ɗmsg~n'̳4,ȑfaaokm6fggWzYaY~=}ky[Bܹso}[y{{wOsOaץ4UU1km0 7ıb0PV AyNUiۦFמDZ'qvKUU n$0gffR|e?Qmr @ !\?,7Wٯ4W-ܺ{C+(\`*-`߃m XBpEı*+p Z~P`Uwc!B. 
]eI'ı zۦd7 MXv 1ʪ*;d'b+)Ću]ڽ6IRU%qPU}7E$hcis~v-Y՘y#9~ξCKq9"TUmW_ͮ]زe 6mZeezzW`޽#B;;ΣQ~뭷mΥ]"41md$J >z:uRcͲ>EN$KKKYb4Mz15=ES7LMO6-R?"pp @ G' u򬘛s9Zkaо` Jk?$O)Nkܐ;isZ,(m@+01zƸ :TRB&u^H)Il8i&᱊ ?ZJnjCDnFQZWLOOSU#pek;1DG>}g'0u}pW׾v.b۷9?_WV9{/o ;T'-"]vt:nEڶd$*8BH覩"h*J3=3MeKK} W) gY֚$NȋNCYU|?ssO @ $|*  |8ؼ+: yٹ)n^E1sSL:.em 0z k$"?DOJ֕11MkSuɲ y nPmɋ6ABa[-x-$x]Gp4~kX+.ʒ$IlR"3#ꖖ({uwY)EUU^K0_d:m\tiD< U=yGs糟,'@$Yf@)%ɡCsG`U'x/#%dTM*N1$)i~ Mmvijv OK'ˈ"ennpH>̉8c?I*;̶in(3jߞA@ &M@`9- *p pJ.pq~sSLuLMP?a?dx8 mևʮDnadYײ,BH%AfҬ,KSu5RIh!Mh!L@hZmۚiImR1nvmEnep5D~4RITLھfm"/ ߶v^T(%">e3Muyn:tn뮻yN?tWzYCyzQ.:t/\cW7<65Z$)5RJ!SӴmceUUR7fN + ulGdYJۥ(J0eI7v I:D]7Q %>uNS>H  r x>> Mm|<gX;_fmL5r ?@35nǛ`4I|pZ7o/GQ!m&e[bN찾qƸTkMmݶ-mG&[xmE]״M0j}kCQ]y|s*R+1ʲDIE5iyQ2WQ~ QVm'+ʉǮ€ێ~(o\}tIs1+@qUWp0yW\}7wpi"e̿Gv:](4>|0CnJkK}$e!"N"Ib󸲬D",#Ib CFL@ Ϟ`sy8wH!@56{&xs۶frkf7,O)aǻ0`| o45bL (ЕFHa7y˪HB)!YMÙٵ9f0EiBJT89~}Q1 JX^粪tL`R 8VeicO[w]ʅ^W\^e@`%yы^#)>1۷o'!{ RqOOO?+Z=ms7N8/ٺqubZTڼ_{Mݐ$ Y'cqq4MBƅoF3Ӵ-Z#$ґ6\"Mr7b/w_&^@ #.OMMu__}l8ۨ8^Ei3Wqy m۠+M`=ss8&IʢwOjjst:,,,rvu%`n˧zOCj17@ Br4p;SM+Ð{+wKXeZ:iU f;qlRr6MCۘ`ֵ}~ȟm4?F IZP~b;R^Յ^Il* [({[XkP*ePRR-uqanZNlaǾqa; }ȑ($U)˒$IiZT֜|^spi/qhi<!7kxG8ٰaJ/+SFh.|s+1NO?5G/+.h$gq=s~M6h[gz~4)򜬓ٝ iK}֭[K?`0Лf*@6EMC$UU6-,..1iQY20ZDʑii]=-<8܈~ p$@ Rr Y> O`8X͚e|{ٳΰ1㶥p<`j"Ib"SO`m[ mkTMCb;t5&R*n,K@@!|kY!}sVE"li)G֭ =kGw%f(eB`@DDĤiJ4&8!|YjVKDZqNףt9Sܶ%.Å0y H57gwE/>(x|of|A>O/|Nömۼ%Gݿ;+ZY슘7^t.NjYyߕEIVQkMӴ4mcS2""4ΐ,M 5n6SÜ^%,C}nmvs>H J@  {&\> X͞<{bn鮏3GBF ʇ|Mke]B   J]AǾ]VȊL$iږ4I3yy=t.ʷM[.(6-,(Z~#*ĺ;ݎ4I~X)Ӛ*u:ܶD"l$IچtB6ER1Y,9o ⋷Ic?OQM.4:R<| _);vtVzY~,..>9={w~?cdL7ּb7wt{];T+N֡ijf$M)Hkۀ$IPVk$D͍i vw4t͡aŧv'd;#@ 0aj N8mF^}N0ˠ{6'֚;k{M6q/s9s333/r]z?ĎE39fU&骢(T^mN%Jv)Ih?S%ziRovءRH|4M?)14Mma'zbb-$@ r 7*s==+9'l7ZzeeQ2 4Mcn)s/d LOOFodv*QDk4q;Ւ77m;@@+C vTuͽ=߾~Φd t]Z5 V4>mZRʇ.&2 \.H6 RdAۘFs$>lґm0IJ*8)׍!G$eYQ%NsGk]37egm/F&<zn4MÎ;Htx.7Mo}k=;N$2(˒du 믏$AHsʊjR-2K>i}lcPJKpx%$@ r <>`} Ҵ-? <<Y?;FhZ! 
"ZJ!yi>Ju7qxykmǦl/Y45MaPRHTH2Mk#5k0M4IPJQfk~.ijvUE0G0ƽneYՄ-N6bV-DĩǮ߼\x&E7̵^ٶm+ pg&zt,u2eU6 I255o*vBHAS7YJf^[%M4in,˨sM\<RUNCrqc| *w'y@ F@`1@>byyv'f~m+]p H4Mc+fGW=833 2H%}X]3jq*zIJY(PR1̇TUe@ !J*zf D0ɲ|ִmQfiU;F=nkx+ԋ(M(7( (xٝ;@ӴHMS9XR?NϚ,۸ٽ{7[laӦM+*o|oz8uzϝev5Y뚲,? "/R_Sif(_ӡ]cB h[LMOuE$EAe N֡n?Wypp)&q Oa:N ^ \[<+0%@l/H8A I+u ])6CfJ!L`kǽnmM+(QDe\]iu]շm#yؑz5䪪m$f8ɲQCx8f}׶-,g)%EY`% >䪬t2z!z$IBQ>wt(m/Me8QR2s^=2;~g+K.$x$[n駟~s '~htM_$8n ĊNdnIQ]Θ8VSn^5eѺbzj8f ͂5kג C|˹ij=ǿ|;M3?&y@ J5]܊0, r{|!i ,( RKȇN!i욹ig爕2mh5RpԺTۦ%MBzt3(2!f}eYy G 4M $Il Z7֬~?{nF17oYzo( ފw0)nRĤn7MbTPF%!(h@I sڈ}N7Oylγ\["ۖ~aHG(!¥6rEnU*0$b7uO4I$}?Pו 8F)E۶nKV)(b2)|:=u ?o092޽/K/!۷oGJATsy͚5GEsx!I+͚Q9yߑq9nOs,N&t]xumӒz0 A{MӸ0t St]G"X-Km$nY۶kG/s(MhSW5APյn*w=J)fffP,+B:wtдm%ņi'6PDpfqM4Iټ߸mkWGx<gJ=6[WuEC:ؽ~.lZ7&OcW6PFr؆"tC3{Hk.l U=CӴ-Ma@۴qB;E6z؞}/2nHmK$`cۨClkپm?QV1Mۺ j3BN-5,K0 w{pUh5;t_iȸQ u枟i-ٺX`B TUŵ^%\wI'ĦMV{YU(=ҝ?0^x!5d$x/5c0DDKa eY"a 3Ih:}&EIgz_Yk: Akn:yx<#\x'|2gV`E+IJ/9Wrus2 N;4.b~w~,;*>.|&[V`E+K]\tEtMSΫ_t:gr<ِynnǤf:ʲd,AeUeͻLڶu$I{UUguPz7Ϳ߱{A09͋x<-x-<58|(8Cs^~0c׹WAUVD:pgBaZnr^eƱlqKE\نu pe Co|PוkM0iنTe|QnY0CM\UڿTG(BBWUM(h'u0h[  IJ,ݠ kL$Yq)[x3v?z9C}qW|(ؾ};h2U3ɺuxް+Z9|']wT[7|9 !d;0Ͱ \XXtϪ(P9 h]QӴEAjOfzp_ڤ92=@hږ0W2ֶ_E<x|sx-8iX?;U/8X _a:~R\ "M0t>aqlھ JFr$" tU4z[!v:d"R:#6&ڻln/;7NqDYVu4ǔ']!)ݺ{J)iUNagYVyF4(zm٠(@7soZHtu,cZqmo½OH$ o|}o߾LO>r֭\z+aNMS/qDZ۠!2Q]GoSM1Bʢk=Ԝ2 Ðn<;{4%4 طo?AgIP%Bv9e߸Ik7: x<m<5i N[=er=~H*6o%1 8hvi^ a>4_Juz:Lƴvw95L(+B[v3$l[ժU.𵡯 c-(?DuEYV8&IN@a~R𸡮k^I"Ez-Q95q̤(H;P1aPVѓ{0 ֎x dPԊ=?K?+]qM7~믿M6qI'~:Ynۿ+Z>я~tx֖ m2֛s]GNeQgUgLY/,9mӰf^Ou~DDw&dVqCi6uݳo~gy j^i | x<,ÇԦn. <x檮qhU]vr`丵cFY8qlai.%qiEXsm۰74a0D"r(*ՙysaPrNi[)QDYiיuAl\IuQL$2q!vwp B&[OUqOe k/kJ7jD(P4MzqƋ8?wuݰ{}{'?ɗ%BgKu$qn€(`Ǒy&A Y6 m?p7oaR5Ӽ Wv:x<OKntKZ`G~>;xуlް\FLTG,cא is`p-^Wa`0ǨC:pLQ .-heUaJaTuD]" I$0ߢ)c4M0ྏ(|E Ӡ|]q4k@{X_2X+W# Bׂn<`8٧lտp6qуG={/~}c4MÎ;𿣜 ׯ_Dy;_cȘmITpLݜ(Wͳ>G(4X;a貨6(u!"jJ,$&7_LO 8͋x<x)#oU^bI/>O @l ! 
`ـ zd74845J}6F NK^(vj !ک2"dJ9_^ :Z ƃP+*a6"Y,Ki[ Q;`egqiq:ksH۴ڛl=tYWӴ-J)$qJGY}q/o/9#u|`zX 6UN4L$^Ha0p4mXw !"Nظ׼i- e#s(K[ؼy32E_y8_ .`Vk.ַ8"m"#~/BIDMa{ڶmf)RRUҜаZa:33Z~y&$:,sfB E{=jUӼx<юx,c-6󉫼'ĆųN!10~۩U U.ȅ˰Ұ-cIQLHM0${ATjF.V*[g}m4dƑNB,4mAG1eU&)uSuLUW$2hnFњGma}=Ek3 !J7M[<FD3Fe"zmO~vϛfv=k0w^v.fyN]4>1Q!]0ϥ8&6ʲd4m @n2J)74t@ohEBЛSB >0=¿^sjFӾx<юo.{<1y*iUW8u<ȿ}NZqFI<\C%uF_UAcak8&&`RzT]kO,J8B߲ eӴ- h)J%ɳ,!BD(# J)\8nF}h8"v]#B}OY$R˘,?<}T !ʴk׆lۖu39v"/?4b{:f=C|0}vy9MsC6noo~6/khm-I=C&fQ۶nCN;C6Ml>ᥓ 8z IDATM0M[zwE<x x=Vy-O8g/u`Z'4_]>9e:$1UU;sGe\;u0^eEI,c(rﱤ(xCB;NxH;:ݺ,h0\RfDUqPmKtQH6(1:s]u]VpXfk2M [WUUio/m4McTM¿yh7M,1ozӛx[ '[lYi??8J)sN:/=~S8x }aS, }r_w'B( }ba4,ٰa]6ޠ*Ji lfb|'F{ōYx<4|sxdi͘(N L86C욶5, SJT:>D$R2fH |^=j_)СvEÀ4òf( s]&WuB7 '©)`0 }Sc>{cq(Jk7$m[:`0f<`i,g藼mkJ-~ȵg_<~gp#Qݑ?o\~\z!رcY{\p.z7/9H`pPlP먪8h<<#"H34b2YX\DFLbg`{=9p'>ari^x<穈=0O֬I w۷0 ټnDtk‘ F7Jҵm0a}B, 5 pAk5bWu Y%tS7mKG @@Z(v!6@YV`[/kۯ#6wemشu9"d" N"#7/jHA!x{AEڹ0@>uSZJƻީf<)xsOa1\p_|1guj/iM(gǎ|_=67x#]tъ8[KE`NSʲL$uMKϞɄ|4:4`V$ eQhِϼ5kf MhDQf)Y0;wN.>;x<o򄐑sN^xkF igkDUYh0 ,"AӴ0 nYAEM7y2شn>۰Vn@p5Q; ABE:ȶBݶ#Ӧcf<I A4IBex9M]6-صGI)t`$d>2o!0_-\qk/})_|1x+y-=[v,xY?,RUQݴJӔEѭdžmKYǓi(i$}GU4uC>s7SMx\x<ϴx$uGtbу|{;끽50Kݺ,,, 8pDLQF xc2޵k9U=&m[E&U #%qDwƑ8Um*uT !" G߭8ϴ#3נ^X\ڍDBsNqLun|Prj81mv`耾(Jc./,,ַ]v{[ _]8AS` yP׵0emŅE<#3^xl. оa iG#Doօ!'|)~fE<xpx1EhgqGW`Ey;W{ϟy~?x:0^0s4X}֣F2ϘU0' Ǝ a 2'YeYwy%a|7r=Ӽ /E<xNxlZwǮj\u] 3L8,йLhWrB, g v1MgBeA@QVAZڽl6Ht[xRL5N7u1tk"bnۖ,K]KQ4OeYZb)k p~MNuYNw]O0k'R6mg{_,EK 2d)Ց$5Rmk;2*$mKٜ_~~Q]ϮGG{O~|K_"sm憪y+\޴i{V`E/| +~[7mǓSWUeT:-ʴ{sB)Ehiw]j[u$I5PngTUOd }ױշí?zh^ >͋x<<*x<j7b>iUW84+Ɂ0JzƋ(Pn/WRM{H9:P]G&t##tuv uh"bLyp hr8ע~aHI4MvWב)Q!e !u4 @(t!RJ7KJ^W75"\C۠*M& T4MZ-o=~.7DB*}8NU/81aǎN!=qc]p8w"XeKϦnk7~BrG)r#1 Ʉ(>gq)oi[Md G#aܿoGӼ Ӽx<.{<g; ls~v96fqq١umۺJ ޚ`dV"tM0:rAR&eA$"nPeõCatn@¾IU5OU)İZ軎mI,"A UtMWuE:NuKR7m5#}Or 4@b"" G0D1i1[gn¯>w[6}sՓ󰸸7 .yN=T֭[:y{߻"J5/w秱y } + 0('y4b2=4gڡ/шf /Ӿx<.{<Hb7iS@wjv?zn;v=lnF 6\Xj\UfYjn蚀42ˇ^k$Tǒ(P2òFJAOj0ziB*8r! 
Q$ܠ0 ]pdНm`'R.LX-ЅY&$ts׵T{ D)k-l o=ն,e4M{=įi(5[,Xf #a`ڜ_988&ᢔnK.[oe˖-Xqk_z|;M #%K-nb{^ovEfgn#~%_e=g\AV=yx ^x!k89Hd7tZE,co;|i̎2:6V)i4UE$)ش {9 h *jD)ЎEYgsJ) aX+ :l!9.̯J4 Ag:/,0hn#Bع#J)hۖ$IX,"c hEGY# Sa}UmruDm b)F&p;=Lk>&5Wr7f: x̛&."?^Ϊvbs9?7X0t'ɤ Re<9E,N&A@`45EQym0Sq]4Y@o#!zaˮ>w=B`4/x<e]th ǡn0_a' EqgS*sKkDGt%}?L̯ n*A@enx&H8c݈%? l,paF*ŀff0 UE&ijڊZ;]K7[H$۶uzf1eY_K}cѰ _XXpA0 ,=^4e0Cɬi"F9f,y`;ȣsOA4|;wOdͫ'\.˒my+/<3 0mKȟFe<˭ ;4eaaAk|HTMӠ' Dԝr=Nezsm(Ӽx<Ge ǡ{zQ< 7eX{~#MjE۶i9FnέMX)EѨBθPM, As]7ΉzͿ+!D&h}UU ZכuwҰ%p:8QJ(3Z4MpfRҙj[1т#~g'S5]{p[ao#W]u֭SNyxwl޼x¯ﺎ??_?/{8wۉuC&aFS1Dr`8 / IDATv;wY;hDuTdEoO"DJ)fgg^-0v{wm oL"x<%|x<]Oza]{eÚ1k)I hLHkCT BӶDqD۴Fn5P%qSM0;;L{!H:qSlݶ0A(A@Yf(@¶myCx[FQܪMgYFQ*1k{{i[߯JhDUU$IbŒUA.RJirl8&:C߰ך<ī^t ?zvg?Y.20dǎq˚*+\Yax׻ŕW^"~,{̭&$m{( )ʼng,.23U9GƳyQ`Ӿx<g#ZqpDO1{߸.n0i0*C耸zBF찿Ȳi\xl[I"o96.4Սܦi"3 H7M!yauk+UAUU i(ZۆHD8m #&kےQ i!~tvX_Q6&m$R=2C$qlȞ)eY$ t~f N䗟}*kgr;GQ7Oȁ+Ї><۶mcf戞s?A>Ou'nx Hy8?a(ϙ,N,3 yۦny~ }߻Ʉ(%4eA܄˾qk>Oyx<x<S5o u3w0q(t#Sauݐ9Jt}}OUWQ|6Ri۲KU8eçZAkYR RyA)e;*oK .(qwEeR`$ ' 0.mumBsT>UE`Vyenv uh;wq7oS\q\_YgYQfggW}=\oq_wwkEXxZ^Ⳑ!~.lܰyQNgUUMfc(<33n(cBb7Tқ`&̶Bty"C_j-_^Xx<eİ|0dqqiXedR;M(6|FEU>˩k|x<ϓ=TC|8 8aUW8ԭ~H(NܴXD$ AZD(^e \ P]EͺZ)cİ,X=j/uRG#ўf;$,\"KC& v"WuE"wކVRڟ,˲rnX)cڶu>岬H醉Yotu4MC"%EY4溮( )%s:xL]dYF]׌c:Ǵ&u3[9SBp# /˥^??!d<7C>K+l[xy\{nnƍGi=y{V佞g׼?޽{ ٻw\'׽L]?x87ϞV"NuHә$(=0nue۬myN10;;K4|{S@fx<<>G=y |fdUW8<|`ovGlȲJ,{} >Ii.P{G6n]ɀk&ZuRH:f:iJ)]ס:E7MC$Imb{3aR2$$Cq֣шaF{ݻcn~,M4Mi53UN)"b8=.kRRUG帵#^9Y}Ou7x#شi'tea-[py2pEq}5~A{nD$Q,iPͶmUGGE@YSvc:=24{`ˮN{=s͔Rcjf۲dl, M@UWtDDtGWCtGT|i}s>e)%w}N[ᆣRSiO!2tgk-;g>(=C`v0iG)%ҹ@J)v{=}H$D"o#H$983oȗ? 
EEXʲ"DpiJ6a_f$i֚qH7 4^Pz4I,X!c B^ϡxi:8N4MRc q diyF6[הE8Mi ai躎iB(' Cf֚f5}߇鰞ۢ,(Ejy7}9)'2 w8vyއozY5+rNp//qwfeڗ$ s3FP=[RJH 5FCB,K݁.ɿ֫u9*Ix_޵ D"H$τ.G"H$G_~p5p t6F#Oɻb.ئ|E k] VR*?LϐyhgYFQ#Bl(a_~h\0[I.xyS=k&4b1y.ڠKV)MUw-J50PeXoyZk,cѶ}ZEY"(ByN|>syn`/-MGeL繞[ax`Sz+P[n^7wzS|;9v_=:ea9gѣG->f%ۮ'R,NQZI8piچ,KIm\YJ7Y?*8M(٬g' B,KFx_޵];q?/D"H$r$D" |dKpÁ,(mxgϳ.Ypx$q I\[|Xp: *fa5mӠZx4 OR9}?}857$iB9,l$ c\ynNh78P0M{ {#* uygvNac-u]g7maBS|VxH)pyH4%aYu @۴üky5qr xG}g(뮻x׻Ž__~`^s\yϻsߌ.?{9Xo{ ,ł$$ .spDJkTEiX,a0e$elk拜?=<}r[8V~^$D"H$H$D5~#y]?豓ƃx[  7ϵmsv?$.kҮ$fktWCV)E[J)r$YFYvf6ma4M@iN?ZR*iz"g$jiMJ10yQ$"2r!H ,ZZߊvj~vk{:ԥT8 #ڸw20n`]U ~7^0)Th<}[[[\BWWs]~y/j/p{Z,s*+|b>$IH4l;)ݦ#ݣֆZrdwoHm o7MeYR܊6H$D"H;D"Q|u~V^yA;-:i4k}sVkMS#󓔡 0#d蝯( &9Qodi҅c ¹[y`ݵ&/ t+4N:6Y򜺪B<64@]WAqZK:mۧ3}scT>${ ?g}yc +_~gr\sG9rN뢋.ɓmOZ7FS`q}9aa{G"mHҔM!9u]{N$e8 4u]Q%k-mu]oOӔfCH)gڷ})n|?/D"H$Ήr$D"; />|K\v6x-E2|\>"Nb NES庪H3LƉ4KʷqGRN0#Yc3&SMij&ϝk֮?+<M(l6MZS5*m[5\-˒qrkj̷A"Ru$Q9MG60L$M]{i 6eYRWP,tBȺFIť[x Wҏǟ=er};CqמS/R4W^|ȩjh'k+%!I0yaZ1yPLE䂱AF6M8J{z>И[o7, '={ |߽H$D".G"H$rnW\p+: Jx>~8vrы3 8Rvv5We @>65,qaV6,(S`dYT,I*Y1?nnJVUN1k6f`mV|҅ܣwW5k0skyab8MTU8]&kbCY y8>MũF`G a wvvPRQV%$N%rx/}M!xt{| Ȳo<Ͽ}1˷Xo6;߲1k^I& ,;HY$IJdYJ&a0UI*)Y.hc>(21_9+YCݬ}'~^$D"H$rnr$D"ǣ-211Z=v?jō7b<ߵo%|Nk1}$>qcXC]ՀuO "wM}mH7' Tc $ ֿEˉ' a`-i{Ns+ϋD"H$D1\D"Hdyo󀧞_:>4[myVR) >06 IDAT0tUWXcRbNSڦ暋wxmr%ynwÉUw07׷0 -|;yᇹ꫿|s57_I]O&uE&RJFc-ēpaAi(7JAw{ڶ*dG4u̵x^܁?_܊]#yH$D"ȹ%ˑH$pϷ-mo{\d\z}?:tޑk%"3^crA0.$u؂cˢD*UUgsAQ̊ $BH>@ȚfuE[nyYU^eQuibaG 960N /C8?^yO0dY0uEUUd1={v|ibѶ.LRhlX.X g|/ ?/㖫/a'=O)~#Gp5ׄF٘__U~\nFkiJS7 ,)'I@fa9 UY=y.H<X,4" y.Ȅ )YOSLR" s_{l?B? 
~^$D"H$rr$D"/<iශ'Gzq>ů#K..dYֆm4KR5<7Mu]uu]INhrb7˲, N[ʒbjr+u~+a`X h-}5.;rz57gNe~G?O,kf#;/,Knr`MDFQVF;'u mAC`%{{+1FC^3W%0`7[eEQ0N=}O|k n_$D"HEF #H$98p;50L{~Gv(\im` TD۶ ZI5Z4 «AwsPV[eYK u]8n]a 0!NͷH)9tP&ɻ'>Ƒ .c 4EFUUAGp:Se!zaMZk~y0kk Ij 4n4Cii^wJ7Mε c^;^ B'ywٷG>{^禛ni[|v_})Cķ34C+qD$iVhuz`oƟ֩.;+uB,:MS6]ak9sOp@$D"Hdr$D"S!櫏wωUϡEvȳ{y&N)Ƶ/BiEX&9U)]WQuq bk-0@PSw)5SJ&rNg)%j ch:-j.xpz\xl" 44pi*KgѶsI`:q!`YR5J)4=Y&ha#[k4eooWJRJɢ ymrEyvwu ׷Cu|ӟ]zO<^{-\p3#q@+|>˦ o/10) ,ʂra~ RNI|(]ěH]UeO1#%Ip3xh=Ysa*}/D"H$yH$D/ xS?pXky|xS\v,ɲ7lUU0RN/9ָvq8 G?J۶\uUo恮)KS~r!ܰɶm6Z4c\[~X 9pPQ%JJb]l-mےeYP@d}T21_8E2wj&Ҕi3? E"H$D"O #H$9x'ƅ큮,<3_y꣔!D6: S #(kTj`Z7 B:OsYk}Bs'IdeQxJ`%;㹭azqH %%m?qzṋCvq BTC?჋e:8eEhCp|soN n#.LP!$eA]^}閫Z4!M]( @\}=Sv<D"H$0p9D"=oO7tEgaՍ|ԗ W_z!)!4N(ڷsc\8A ႰIGZyHӔ0y:9蝃Zo'ϡjy3J)yc1cc [[KNڥ( !m7o\&<0󚍢(IZ:ʲBM&t XD{^{׹Vjkuhq _lۆESqݥW%xĊn8;[ ez^qibuRו ' A.r~@)MQI F]0`IIRa?8j svVm2 y9Q7s+"H$D".G"H$B#p/]Y&}y~K/*YQ%Y=J;MŢ]ެ«'Rjm|؅MSU !d<lYz@g0PEQ.!_pD0`Pu9oKd&8$0 #UUt4MMQ7Øu8uY(%Cl-E½r^, fFlof\P,K"!.ʋz7^qm8vrW/~+yٕG\+r*m]{^dXcYEN&24ua8Ld(DNy:wXX s͇$XHLTDYohZs=vOE"H$D"/1\D"Hʼnpp p݁,(mxg؝^#hUYQWF;WleTUReX\E Xum[UB`e˴o3+}M:+ ڦ!2w^\d^19]QnMSuʎ-RL佼 ${ k-r^߶-R)ąZk6Ţ {4 CЉ<k_ O)"xww y-WxnzsW]ny,YNi9J*,YL1 $0I[Ţ RIRIe$OIҊdl9/iRw$Ml:D.xg`m }B$D"H'ˑH$y]\#p HrAZ~9>v}<).?r*#K3ܐ&iddoo4*3\+v>,;=o-K):P8xB^oH,s{֕,W]woz"gN1)` W_rꛨ" x$IɷdY"k>@nfsz_eʽD1pߛ}"NQ}9Z4ND"H$#ˑH$| | xƅ́,<}ard, ?O0#{n 4B4M| aP: ʆySa_$A1o,C$mn!0 k<;gm0 o['LZB*f֚k5l"/0Ɛenʲb^Q.8J3 =msplBP(U.,rBti"BsS|{#K~7p;X`J$?=IwISe8eAV,چiM*OVy4qQ(/҄ik7|} |S>%u? 
E"H$D"G #H$ޥ7KC |0pZ[bFMS@>Ρy۸@xDڦ^3f^qf{Dc|P=stNu20y85fYZM]BOmӄwi-ƺYN%IBmX,l-R)V+78qS'I¦?k͉'F  <]Uke{Kx˯7&<.s}?ۋet9I5(Zk0ˉy$emܽ ap.r,eF,)'$[{[R)4eٸASӏNkwI`w~G8 x|?/D"H$9XbD">.dx p遮,?5k^DU䌣~b2eYYy̝7pOz=FQI|xZ%#:E8!l-jBqtm`QJ$C<-@4˂92A۾@wuk/σP>ujmC(th')qtabZeYZ%CJ_^hmu}Ѐ f7p߻:|Yjqiv*5}߻S)ZCYLC^y?jJ|nnJ$^ΥnX_UÁ(|>Nָ|oicH2$dB& >ɅI|y^4I!s:I~ H$D".G"H$ϛ-@~+: O<{kj9zIMh$BpE|M]#ZfaX6 ᛦrA8 weHJ=EYq] H<ۣ(h@Y90܄V~֚fZZV5;;H)20yNUҴ yVB}vC Z{eA4s u=eyϴ!8szݰM$Vk, Ve])}߳$aG( p*Hh-rLĕ\k_zoZ c'2zekJExw_cHHX,t}OQ$!M4( H5=;Xo.;E~KlW=~^$D"H$r~H$D"gwk59_}"\{EkcB71!`UJQ!~IRyh2MCqIڦaلOAUd^!`obowmzPEms;~XE"H$D"1\D"H$!;\xҍ/~ >v}/*,z )%I8Cf@hEiq ^#?c3\.ps5;֛uU!} $ 4&v4yeYaAʉIc,Ș&|.9dض IDATo&1^zCaw\G./mЏ̯#I{g{Nb)k ZiNb{{4Klkʪr벆Wx?ت'V'e>zdTUA()t8DEve=aGTUIl-2 $8Նd93kA;ʣϜxx+~^$D"H$r~H$D"x7R% ǎѿ6\rhI[^Mqi<#I pn- a; iJ亪GF5Ơf{k RJԩSL ]۶-`C6!N$'u6 S>L nC];4NeLdH8%joj[B;qV[4MT#zn4W>(e4MaPMQF!(qX,LDU0yowibADS\w~r Xxn.jn. wHP^ޙ".ZkƢXȲzRܴ nbhMeCr lɁXCO=*=s+.:O srPJxVW̏[׈,# + 1BG۶LRRWUMH 92h&X Lכf<=bd&AkE? µEm٬9\8'tcmV5Evk$Lɭ 6c6&~ 0o484MEA4 i5)1FPRRVm C?PF8yN? )͚Cmɛo6Ǐb{jQ̛nC[]>W{+Lt "Cd" GXr4@=V hۆ<[YQxu44{z$MS< _{;~p>D"H$p9D"ȷSn_~+: O/;Ʋ沋Rs_ikyNUל8q,ܫ$f4MT^㠔 Ǻ@6 j !łzr <k-84-FkN: J3#JiڶtDUi$% r pw,pQ È1\uAz.,臁ɇJk>a^a2bѲZQ[{QPΡ,D)ZiJQ}Gc &8mChmX,Z67LӔqC lh`4miz 4Qp4}r._R^raV'{V˿~+9ɲ,sCʲ IRa}i!rRnOY= 'I(XפIbᆲJa(r1Tm%IJ0 2A?JWw5O؟x$D"HEE #H$+$p'׀KtEg%_~|⮯mUbeɩ]RHZVG̎[m SLB,s4Eew/qbhtط,ckZeQ m?o,Kq@)b$I)Zi!MSN<`)|;TJvᷠz"+i"I\y'qgk ]`knooXq$K]3{zr?kkN7{B~pJ)C{,KCxm3Lǎ  MBS8ҮMT JJv׭\l=cȢk_E97{<@Q+EUy'Ir{C&$Ч\M"H$D"1\D"H$r1=.94> _EF&IBH)FCShpCЬ`?[[ػ`3*Pw?US<}pIӧ$a q(K&e󲷲,)ʒ`ȋ8NM\i}K5-/ Ð װi\ms؜znLdqի׸9ZVֺM̙3V+Q7 q>0ºuIEaH7>w,m>\$z+_aՓ}RKw}??񯼃?q+WKuc^>We&m?hWQnɡR} ÐMavy:|ɦzkv;hc(_=u !BޅB򗀿YnJ)ſλ_-gh~_V ;nYmp#'"M906FMQN6TUE(8::C֛ {%mۺy[ZkQHYhNv~OZ^~ i^o8s搲,kb,+Uylk04-]K 8X.99>: $9|h"Cunɝ1n7u%ۯ8֝m;7 n]@t''_~}Y8,\9nmT.N,{j&I\nxèYkM0ƺA7I=PUUX,,AZk,'E1Oj{Gt''+0d^3 #Mme9bG DUU\.GH[h|C۴Si("uł$Nhu@w=ַU?q.?ޏ EB!kB!\~8pUax*ps)٧,yq۶x͛j5TU1iSt]s«5} C! 
BPR`Og88hu0=yE1$I8YH,mivnf:ÍZfr9w7w]o]턵j0}w d;᭔rݶ\~iSP0X,EP|MQa@ӴDQ8u?kumX VSA8N@ES[Uq4yb[}P0gm:8dX̰eY>]߹ʒw=S'~KWw]^D!B>H,B[eo>9Wxoq`A(9tl6\A0WVDViж-8mOU] iw;l}M)+|?(r$adݒ@}7M"'sC_Kv(3/s,aH5MӰZ֚mX,sE8݃iY.UdQ"}7 0'sG!MS/;Ոx&¹[d5n:s w;/_Z 3iiƽu8+?O{4FnFhOMP75(8N}Bؽ]߳\.UUsaHdYNerSeG4={E^<7>>{G8-6Zm]UMnO1n}KYVsYڶ{\s\zM)򘪏@uK+Ɓ k-jNmkO3ML3L +G{/>]~lG\gvy!B"B!^+rWz8JyY>Exmgc4}?$1MLᭇoPy\p?M'qLUhϛk-©ٳ]Gv]>}ߣ,+>ME\b ^Et0=8QrU];lU"=/eIYU, a_b*4qS-zZ,+7ʋV3%~*V~Hus Mo}$qD)Q.YiW0UoXk9>>Zi&o;k-eYM뗯\ug7zC7nj7mF]**`,n0wTE]QeY>;NK" C=i9$NXk}WMZC6qo}&"6Y8y1EYbk9~( a`ܵGmwRZ{6i[)E^|'8]~-`B!_. !2n>Ǹu1]vRaEp|} op0 n^-iۖzoڮC\;(cjgYNt@Z)U i),<(ZKhmhXRQ^xGF\-Ͷ: nQ.5E:ʮ<6( 2qiju[COe,( ,<:Mp||dنq9{,yq!o:ՓmIwgyVs7 ۶m%}-3]avw?x V? <ˋ!B? Bz~d^nxSpqY Ǖer*ʪ" 9͋ ZS5T%00#E!M]c]DEU]O}nѠ'}0i M]>u>Q4i]ZW0yoN"/H)uU&y_ӸhN [ky_&vr|Nu%c UUMھYKGEKg5+[-h겢zm;$qaE2=">qi"B!Ae!B4W~8w'ntfQ7\K zۮcXm8R5}߱\.;(" acM$I2]Qsov_h'I8{;ͬ\r>M\o *j?ڶ&;˲, 8fZc! o w}OE @TU-[OD7Q<->'9J'-<ni+;_9k}qaoTeL!y8OGQDEe9J)'naF&#z\JQVQ0 $iJg~@Yey"Ek1Z4eu"Bq$r|'0dk*1νZ{ooIQSL"Mdن4Ipq]A2Mb]eI?0 ӮdZf ּx(0 !B7 BF4O)x'skxe>~\%wqkܾBkǸץ<#0Ҷ X-溋߷ 0-sAnYhyUUM [k"דEYu-eY,#M6 gϠ{XR.ߧ{ C4I}KӶspnnz#)K ug|Kq<;@ˋ!B7[/NB!cׁ:]@p'6|᧹DkTWds$nw72臁 |I(}}O*RVs@U(QõG"~u,k[~ec E3 =uUy(B%/ Mf0 ^QQρUK4YEZk麎bIwS(ڛk+4mK]W֟+-کzrYeTuM釁8臞v1yx7nB!U. !BZ|x/pd^nxSpcGCkř4Zam4-DѮ- H1:\9l2<ϛ'qaH㺮#ben^r%mfPh4u ap=ί,˩#Z{VkŴ|׈b #]sc uBc1XqhR.M$i;)aݴs3NS{{K}߳ܣ*<{ ;==͆8A)0,+ڵkA@4XcE^L>u]SV]߳X(u=RcÀz3Mz[8 C('mK.o[E80BY(8pmg8^gTM0hmmwv<~>.?Svy!B&B!ķ5emǣϾ}M~9{OUh1֐gԩ\%Ua\Hhϣ,Kfoo'"|'|(J3n^](\<:M1TuMn9_Uc*Ka$6)HNi@kjo-{{{i{v;"-l\%޼k /"B!$\B!Όz_fy'~pe> g]?O.e6"M|ߧ9U0WMnq1mi0<0#}'2Ȳ֢SEYG10m۲Z fÙC,K8*Zv{1>8ht臁|_ fkpm;ZCYVh*,A#~<9<8Dk3WVtmG߹ku]G&Լhm[֮n,9 ^o6hҾay;ǮOh$I\(i|ߟ\)W1qw="/tӢ?vˋ!BJ. !B|?=LV|e>uambGqLdY(-VPe0 .(r~8Xk~0!Y!lCHr t8wooIusFuXkY.hYtuUUϓUU:$1]ױ\y0 eYb^o^,Te00A5?ӷq./"B!ğ$B!w_1or'xؽU w٣k*|ߟ CMS-iP mDZE4Iȋ8h^ji]S%a1#a0 E^w=ztN1 =0b!yxGUU5B>dYFǔeIVkv Rdy8DaHӶtmGu,)Mp||rc끿B!S. 
!B\u28feuKW=pm]p,8sp]qZKEI4s۴-X)m۲Hh~:aaF)5Wel9>ܔR,*rI4M31e8qDDQHӴDQHTU]y3LKaԴњڦ<O?]e\R!B9 B!7|q̷bϾtħ^" zaprbV5}ߣqxU5IJyUIYSsO۵}bD)c G.5a$Y,sOvvJyiR(d;uu7z}R%gv4O-NNC44mX-Y AP,omPmdWLn`C߻zI|my./"B!wBe!B[C|x/ 6ͧy~O~YYs=/]'04 A]ב9y^$1iެ$NFcduBF @sosa8t}"M,0SMܿȈ$} UUϋ q=k гܣ+4eG0*+3kǁwy!B!S. !BZF }gsn.\ǿ8Ͽtĝۆ( )ˊE{{}8Q(ެi< mE^18[K? s\5A۠y.}1y.z(}7=45uϟ;G6M[uǑ /<}]"B!BiH,Bqzx?@eND8s/{k_;oCycPж-IF@QՕF@k1nٳ@Ao-E^`ZKy4M1+f׫< UU1#Zk()yr{0$sPc,vo΋/ 4׍u k,^ SB!BYH,Bڱ>"M2z#GazMgeI?R׮y{S$i10 YVQ ԟ1eUy<)M㦔1J (˒mZk-u4-J)Ej1njyB!. !BTwgND7 _>(Mq4`pYc>IPW5Sir8M{ڮß/C]t]֚}4-u]O)YGv8NȲqRJ)ڮ%}l:o~|^v^ !Bg%B!kWxx;p穞&oȇ G뜷~HXQ#38KYt]?MXߒg9a2NR4Iȋb^.^)q9yb@kM&A@]WF1mK4-8:>By;Wwy~8EB!pY!o|8TOt]?p>zϣ|ᎃI`?ʒt`Eys=!Mےnz:SBẮhh^g!c1}l8嫏=۳~0 !Bq˒pY!9ශ?TOm ߺ}_:ⶃ%IHyRm22Y~qS5aP%Ou7ڮ# C'yy^$2}cgXP%P5÷>W-逿|mB!Ae!Bק+ߘ~.?k+>'xe<(˒(X,R1EI4(OۥZkqU`%24eX0`¿", 8&2M.躖O|~wB!I,B> ~7|˺CK?C' CikG(]703#Q8MӠ"2)˒}(b G w]^@!B& B!*wzX_}>O<3L!֚"/k5m1,#"õY.Nfk3gHyr3/^OP6._ן-B!k:!BS4N,ߑ8k;?/nFmZoi4%s1(凹ʸpك%m!u]G=0`eGRA@UUa8`mfaH prX?ݻv~X"B!B삄B!Yව 43:=W<<ϣ*y'$ C)Rssux[7#gwRw"B!B슄B!be\wTOt/^_g_b?yg0 .q)˒ ˣ ܿˋ!BK. !B?i|%`(n*?<Al $I8(3u2 7>]?>!BK. !B2qUIDATC&E}.qCy  <}c~vM,^v\ >> from logbook.compat import redirect_logging, StreamHandler >>> import sys >>> StreamHandler(sys.stdout).push_application() >>> redirect_logging() >>> from logging import getLogger >>> log = getLogger('My Logger') >>> log.warn('This is a warning') [2015-10-05 19:13:37.524346] WARNING: My Logger: This is a warning Advanced Setup -------------- The way this is implemented is with a :class:`~logbook.compat.RedirectLoggingHandler`. This class is a handler for the old logging system that sends records via an internal logbook logger to the active logbook handlers. 
This handler can then be added to specific logging loggers if you want: >>> from logging import getLogger, StreamHandler >>> import sys >>> StreamHandler(sys.stdout).push_application() >>> mylog = getLogger('My Log') >>> from logbook.compat import RedirectLoggingHandler >>> mylog.addHandler(RedirectLoggingHandler()) >>> otherlog = getLogger('Other Log') >>> otherlog.warn('logging is deprecated') No handlers could be found for logger "Other Log" >>> mylog.warn('but logbook is awesome') [2015-10-05 19:13:37.524346] WARNING: My Log: but logbook is awesome Reverse Redirects ----------------- You can also redirect logbook records to logging, so the other way round. For this you just have to activate the :class:`~logbook.compat.LoggingHandler` for the thread or application:: from logbook import Logger from logbook.compat import LoggingHandler log = Logger('My app') with LoggingHandler(): log.warn('Going to logging') logbook-1.5.3/docs/conf.py000066400000000000000000000163611355165376200154270ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Logbook documentation build configuration file, created by # sphinx-quickstart on Fri Jul 23 16:54:49 2010. # # This file is execfile()d with the current directory set to its containing # dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys import os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.extend((os.path.abspath('.'), os.path.abspath('..'))) # -- General configuration ---------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. 
# needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Logbook' copyright = u'2010, Armin Ronacher, Georg Brandl' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. with open(os.path.join(os.path.dirname(__file__), "..", "logbook", "__version__.py")) as version_file: # can't use import here... version = release = version_file.read().strip().split("=")[1].strip()[1:-1] # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. 
# show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # -- Options for HTML output -------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'sheet' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. html_theme_options = { 'nosidebar': True, } # Add any paths that contain custom themes here, relative to this directory. html_theme_path = ['.'] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". html_title = "Logbook" # A shorter title for the navigation bar. Default is the same as html_title. html_short_title = "Logbook " + release # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. 
# html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # html_add_permalinks = '' # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = '' # Output file base name for HTML help builder. htmlhelp_basename = 'Logbookdoc' # -- Options for LaTeX output ------------------------------------------------- # The paper size ('letter' or 'a4'). # latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). # latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, # documentclass [howto/manual]). latex_documents = [ ('index', 'Logbook.tex', u'Logbook Documentation', u'Armin Ronacher, Georg Brandl', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Additional stuff for the LaTeX preamble. # latex_preamble = '' # Documents to append as an appendix to all manuals. 
# latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output ------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'logbook', u'Logbook Documentation', [u'Armin Ronacher, Georg Brandl'], 1) ] intersphinx_mapping = { 'http://docs.python.org': None } logbook-1.5.3/docs/cookbook.rst000066400000000000000000000011701355165376200164600ustar00rootroot00000000000000Cookbook ======== Filtering Records Based on Extra Info ------------------------------------- .. code-block:: python # This code demonstrates the usage of the `extra` argument for log records to enable advanced filtering of records through handlers import logbook if __name__ == "__main__": only_interesting = logbook.FileHandler('/tmp/interesting.log', filter=lambda r, h: r.extra['interesting']) everything = logbook.FileHandler('/tmp/all.log', bubble=True) with only_interesting, everything: logbook.info('this is interesting', extra={'interesting': True}) logbook.info('this is not interesting') logbook-1.5.3/docs/designdefense.rst000066400000000000000000000240101355165376200174530ustar00rootroot00000000000000Design Principles ================= .. currentmodule:: logbook Logbook is a logging library that breaks many expectations people have in logging libraries to support paradigms we think are more suitable for modern applications than the traditional Java inspired logging system that can also be found in the Python standard library and many more programming languages. This section of the documentation should help you understand the design of Logbook and why it was implemented like this. No Logger Registry ------------------ Logbook is unique in that it has the concept of logging channels but that it does not keep a global registry of them. 
In the standard library's logging module a logger is attached to a tree of loggers that are stored in the logging module itself as global state. In logbook a logger is just an opaque object that might or might not have a name and attached information such as log level or customizations, but the lifetime and availability of that object is controlled by the person creating that logger. The registry is necessary for the logging library to give the user the ability to configure these loggers. Logbook has a completely different concept of dispatching from loggers to the actual handlers which removes the requirement and usefulness of such a registry. The advantage of the logbook system is that it's a cheap operation to create a logger and that a logger can easily be garbage collected to remove all traces of it. Instead Logbook moves the burden of delivering a log record from the log channel's attached log to an independent entity that looks at the context of the execution to figure out where to deliver it. Context Sensitive Handler Stack ------------------------------- Python has two builtin ways to express implicit context: processes and threads. What this means is that if you have a function that is passed no arguments at all, you can figure out what thread called the function and what process you are sitting in. Logbook supports this context information and lets you bind a handler (or more!) for such a context. This is how this works: there are two stacks available at all times in Logbook. The first stack is the process wide stack. It is manipulated with :class:`Handler.push_application` and :class:`Handler.pop_application` (and of course the context manager :class:`Handler.applicationbound`). Then there is a second stack which is per thread. The manipulation of that stack happens with :class:`Handler.push_thread`, :class:`Handler.pop_thread` and the :class:`Handler.threadbound` contextmanager. Let's take a WSGI web application as first example. 
When a request comes in your WSGI server will most likely do one of the following two things: either spawn a new Python process (or reuse a process in a pool), or create a thread (or again, reuse something that already exists). Either way, we can now say that the context of process id and thread id is our playground. For this context we can define a log handler that is active in this context only for a certain time. In pseudocode this would look like this:: def my_application(environ, start_response): my_handler = FileHandler(...) my_handler.push_thread() try: # whatever happens here in terms of logging is handled # by the `my_handler` handler. ... finally: my_handler.pop_thread() Because this is a lot to type, you can also use the `with` statement to do the very same:: def my_application(environ, start_response): with FileHandler(...).threadbound() as my_handler: # whatever happens here in terms of logging is handled # by the `my_handler` handler. ... Additionally there is another place where you can put handlers: directly onto a logging channel (for example on a :class:`Logger`). This stack system might seem like overkill for a traditional system, but it allows complete decoupling from the log handling system and other systems that might log messages. Let's take a GUI application rather than a web application. You have an application that starts up, shuts down and at any point in between might fail or log messages. The typical default behaviour here would be to log into a logfile. Fair enough, that's how these applications work. But what's the point in logging if not even a single warning happened? The traditional solution with the logging library from Python is to set the level high (like `ERROR` or `WARNING`) and log into a file. When things break, you have a look at the file and hope it contains enough information. When you are in full control of the context of execution with a stack based system like Logbook has, there is a lot more you can do. 
For example you could immediately after your application boots up instantiate a :class:`~logbook.FingersCrossedHandler`. This handler buffers *all* log records in memory and does not emit them at all. What's the point? That handler activates when a certain threshold is reached. For example, when the first warning occurs you can write the buffered messages as well as the warning that just happened into a logfile and continue logging from that point. Because there is no point in logging when you will never look at that file anyways. But that alone is not the killer feature of a stack. In a GUI application there is the point where we are still initializing the windowing system. So a file is the best place to log messages. But once we have the GUI initialized, it would be very helpful to show error messages to a user in a console window or a dialog. So what we can do is to initialize at that point a new handler that logs into a dialog. When then a long running tasks in the GUI starts we can move that into a separate thread and intercept all the log calls for that thread into a separate window until the task succeeded. Here such a setup in pseudocode:: from logbook import FileHandler, WARNING from logbook import FingersCrossedHandler def main(): # first we set up a handler that logs everything (including debug # messages, but only starts doing that when a warning happens default_handler = FingersCrossedHandler(FileHandler(filename, delay=True), WARNING) # this handler is now activated as the default handler for the # whole process. We do not bubble up to the default handler # that logs to stderr. 
with default_handler.applicationbound(bubble=False): # now we initialize the GUI of the application initialize_gui() # at that point we can hook our own logger in that intercepts # errors and displays them in a log window with gui.log_handler.applicationbound(): # run the gui mainloop gui.mainloop() This stack can also be used to inject additional information automatically into log records. This is also used to replace the need for custom log levels. No Custom Log Levels -------------------- This change over logging was controversial, even under the two original core developers. There clearly are use cases for custom log levels, but there is an inherent problem with then: they require a registry. If you want custom log levels, you will have to register them somewhere or parts of the system will not know about them. Now we just spent a lot of time ripping out the registry with a stack based approach to solve delivery problems, why introduce a global state again just for log levels? Instead we looked at the cases where custom log levels are useful and figured that in most situations custom log levels are used to put additional information into a log entry. For example it's not uncommon to have separate log levels to filter user input out of a logfile. We instead provide powerful tools to inject arbitrary additional data into log records with the concept of log processors. So for example if you want to log user input and tag it appropriately you can override the :meth:`Logger.process_record` method:: class InputLogger(Logger): def process_record(self, record): record.extra['kind'] = 'input' A handler can then use this information to filter out input:: def no_input(record, handler): return record.extra.get('kind') != 'input' with MyHandler().threadbound(filter=no_input): ... 
Injecting Context-Sensitive Information --------------------------------------- For many situations it's not only necessary to inject information on a per-channel basis but also for all logging calls from a given context. This is best explained for web applications again. If you have some libraries doing logging in code that is triggered from a request you might want to record the URL of that request for each log record so that you get an idea where a specific error happened. This can easily be accomplished by registering a custom processor when binding a handler to a thread:: def my_application(environ, start_reponse): def inject_request_info(record, handler): record.extra['path'] = environ['PATH_INFO'] with Processor(inject_request_info).threadbound(): with my_handler.threadbound(): # rest of the request code here ... Logging Compatibility --------------------- The last pillar of logbook's design is the compatibility with the standard libraries logging system. There are many libraries that exist currently that log information with the standard libraries logging module. Having two separate logging systems in the same process is counterproductive and will cause separate logfiles to appear in the best case or complete chaos in the worst. Because of that, logbook provides ways to transparently redirect all logging records into the logbook stack based record delivery system. That way you can even continue to use the standard libraries logging system to emit log messages and can take the full advantage of logbook's powerful stack system. If you are curious, have a look at :ref:`logging-compat`. logbook-1.5.3/docs/designexplained.rst000066400000000000000000000110731355165376200200200ustar00rootroot00000000000000The Design Explained ==================== This part of the documentation explains the design of Logbook in detail. 
This is not strictly necessary to make use of Logbook but might be helpful when writing custom handlers for Logbook or when using it in a more complex environment. Dispatchers and Channels ------------------------ Logbook does not use traditional loggers, instead a logger is internally named as :class:`~logbook.base.RecordDispatcher`. While a logger also has methods to create new log records, the base class for all record dispatchers itself only has ways to dispatch :class:`~logbook.LogRecord`\s to the handlers. A log record itself might have an attribute that points to the dispatcher that was responsible for dispatching, but it does not have to be. If a log record was created from the builtin :class:`~logbook.Logger` it will have the channel set to the name of the logger. But that itself is no requirement. The only requirement for the channel is that it's a string with some human readable origin information. It could be ``'Database'`` if the database issued the log record, it could be ``'Process-4223'`` if the process with the pid 4223 issued it etc. For example if you are logging from the :func:`logbook.log` function they will have a cannel set, but no dispatcher: >>> from logbook import TestHandler, warn >>> handler = TestHandler() >>> handler.push_application() >>> warn('This is a warning') >>> handler.records[0].channel 'Generic' >>> handler.records[0].dispatcher is None True If you are logging from a custom logger, the channel attribute points to the logger for as long this logger class is not garbage collected: >>> from logbook import Logger, TestHandler >>> logger = Logger('Console') >>> handler = TestHandler() >>> handler.push_application() >>> logger.warn('A warning') >>> handler.records[0].dispatcher is logger True You don't need a record dispatcher to dispatch a log record though. 
The default dispatching can be triggered from a function :func:`~logbook.base.dispatch_record`: >>> from logbook import dispatch_record, LogRecord, INFO, StreamHandler >>> import sys >>> record = LogRecord('My channel', INFO, 'Hello World!') >>> dispatch_record(record) [2015-10-05 19:18:52.211472] INFO: My channel: Hello World! It is pretty common for log records to be created without a dispatcher. Here some common use cases for log records without a dispatcher: - log records that were redirected from a different logging system such as the standard library's :mod:`logging` module or the :mod:`warnings` module. - log records that came from different processes and do not have a dispatcher equivalent in the current process. - log records that came from over the network. The Log Record Container ------------------------ The :class:`~logbook.LogRecord` class is a simple container that holds all the information necessary for a log record. Usually they are created from a :class:`~logbook.Logger` or one of the default log functions (:func:`logbook.warn` etc.) and immediately dispatched to the handlers. The logger will apply some additional knowledge to figure out where the record was created from and if a traceback information should be attached. Normally if log records are dispatched they will be closed immediately after all handlers had their chance to write it down. On closing, the interpreter frame and traceback object will be removed from the log record to break up circular dependencies. Sometimes however it might be necessary to keep log records around for a longer time. Logbook provides three different ways to accomplish that: 1. Handlers can set the :attr:`~logbook.LogRecord.keep_open` attribute of a log record to `True` so that the record dispatcher will not close the object. This is for example used by the :class:`~logbook.TestHandler` so that unittests can still access interpreter frames and traceback objects if necessary. 2. 
Because some information on the log records depends on the interpreter frame (such as the location of the log call) it is possible to pull that related information directly into the log record so that it can safely be closed without losing that information (see :meth:`~logbook.LogRecord.pull_information`). 3. Last but not least, log records can be converted to dictionaries and recreated from these. It is also possible to make these dictionaries safe for JSON export which is used by the :class:`~logbook.ticketing.TicketingHandler` to store information in a database or the :class:`~logbook.more.MultiProcessingHandler` to send information between processes. logbook-1.5.3/docs/features.rst000066400000000000000000000134641355165376200165010ustar00rootroot00000000000000What does it do? ================ Although the Python standard library provides a logging system, you should consider having a look at Logbook for your applications. We think it will work out for you and be fun to use :) Logbook leverages some features of Python that are not available in older Python releases. Logbook currently requires Python 2.7 or higher including Python 3 (3.3 or higher, 3.2 and lower is not supported). Core Features ------------- - Logbook is based on the concept of loggers that are extensible by the application. - Each logger and handler, as well as other parts of the system, may inject additional information into the logging record that improves the usefulness of log entries. - Handlers can be set on an application-wide stack as well as a thread-wide stack. Setting a handler does not replace existing handlers, but gives it higher priority. Each handler has the ability to prevent records from propagating to lower-priority handlers. - Logbook comes with a quick optional configuration that spits all the information to stderr in a useful manner (by setting the LOGBOOK_INSTALL_DEFAULT_HANDLER environment variable). This is useful for webapps, for example. 
- All of the built-in handlers have a useful default configuration applied with formatters that provide all the available information in a format that makes the most sense for the given handler. For example, a default stream handler will try to put all the required information into one line, whereas an email handler will split it up into nicely formatted ASCII tables that span multiple lines. - Logbook has built-in handlers for streams, arbitrary files, files with time and size based rotation, a handler that delivers mails, a handler for the syslog daemon as well as the NT log file. - There is also a special "fingers crossed" handler that, in combination with the handler stack, has the ability to accumulate all logging messages and will deliver those in case a severity level was exceeded. For example, it can withhold all logging messages for a specific request to a web application until an error record appears, in which case it will also send all withheld records to the handler it wraps. This way, you can always log lots of debugging records, but only get see them when they can actually tell you something of interest. - It is possible to inject a handler for testing that records messages for assertions. - Logbook was designed to be fast and with modern Python features in mind. For example, it uses context managers to handle the stack of handlers as well as new-style string formatting for all of the core log calls. - Builtin support for ZeroMQ, RabbitMQ, Redis and other means to distribute log messages between heavily distributed systems and multiple processes. - The Logbook system does not depend on log levels. In fact, custom log levels are not supported, instead we strongly recommend using logging subclasses or log processors that inject tagged information into the log record for this purpose. - :pep:`8` naming and code style. 
Advantages over Logging ----------------------- If properly configured, Logbook's logging calls will be very cheap and provide a great performance improvement over an equivalent configuration of the standard library's logging module. While for some parts we are not quite at performance we desire, there will be some further performance improvements in the upcoming versions. It also supports the ability to inject additional information for all logging calls happening in a specific thread or for the whole application. For example, this makes it possible for a web application to add request-specific information to each log record such as remote address, request URL, HTTP method and more. The logging system is (besides the stack) stateless and makes unit testing it very simple. If context managers are used, it is impossible to corrupt the stack, so each test can easily hook in custom log handlers. Cooperation ----------- Logbook is an addon library to Python and working in an area where there are already a couple of contestants. First of all there is the standard library's :mod:`logging` module, secondly there is also the :mod:`warnings` module which is used internally in Python to warn about invalid uses of APIs and more. We know that there are many situations where you want to use either of them. Be it that they are integrated into a legacy system, part of a library outside of your control or just because they are a better choice. Because of that, Logbook is two-way compatible with :mod:`logging` and one-way compatible with :mod:`warnings`. If you want, you can let all logging calls redirect to the logbook handlers or the other way round, depending on what your desired setup looks like. That way you can enjoy the best of both worlds. It should be Fun ---------------- Logging should be fun. A good log setup makes debugging easier when things go rough. For good results you really have to start using logging before things actually break. 
Logbook comes with a couple of unusual log handlers to bring the fun back to logging. You can log to your personal twitter feed, you can log to mobile devices, your desktop notification system and more. Logbook in a Nutshell --------------------- This is how easy it is to get started with Logbook:: from logbook import warn, StreamHandler import sys StreamHandler(sys.stdout).push_application() warn('This is a warning') Roadmap ------- Here a list of things you can expect in upcoming versions: - c implementation of the internal stack management and record dispatching for higher performance. - a ticketing log handler that creates tickets in trac and redmine. - a web frontend for the ticketing database handler. logbook-1.5.3/docs/index.rst000066400000000000000000000027051355165376200157660ustar00rootroot00000000000000Welcome to Logbook ================== Logbook is a logging system for Python that replaces the standard library's logging module. It was designed with both complex and simple applications in mind and the idea to make logging fun: >>> from logbook import Logger, StreamHandler >>> import sys >>> StreamHandler(sys.stdout).push_application() >>> log = Logger('Logbook') >>> log.info('Hello, World!') [2015-10-05 18:55:56.937141] INFO: Logbook: Hello, World! What makes it fun? What about getting log messages on your phone or desktop notification system? :ref:`Logbook can do that `. Feedback is appreciated. The docs here only show a tiny, tiny feature set and can be incomplete. We will have better docs soon, but until then we hope this gives a sneak peek about how cool Logbook is. If you want more, have a look at the comprehensive suite of tests. Documentation ------------- .. toctree:: :maxdepth: 2 features quickstart setups stacks performance libraries unittesting ticketing compat api/index designexplained designdefense cookbook changelog Project Information ------------------- .. 
cssclass:: toctree-l1 * `Download from PyPI`_ * `Master repository on GitHub`_ * `Mailing list`_ * IRC: ``#pocoo`` on freenode .. _Download from PyPI: https://pypi.org/pypi/Logbook .. _Master repository on GitHub: https://github.com/getlogbook/logbook .. _Mailing list: http://groups.google.com/group/pocoo-libs logbook-1.5.3/docs/libraries.rst000066400000000000000000000121461355165376200166330ustar00rootroot00000000000000Logbook in Libraries ==================== Logging becomes more useful the higher the number of components in a system that are using it. Logbook itself is not a widely supported library so far, but a handful of libraries are using the :mod:`logging` already which can be redirected to Logbook if necessary. Logbook itself is easier to support for libraries than logging because it does away with the central logger registry and can easily be mocked in case the library is not available. Mocking Logbook --------------- If you want to support Logbook in your library but not depend on it you can copy/paste the following piece of code. It will attempt to import logbook and create a :class:`~logbook.Logger` and if it fails provide a class that just swallows all calls:: try: from logbook import Logger except ImportError: class Logger(object): def __init__(self, name, level=0): self.name = name self.level = level debug = info = warn = warning = notice = error = exception = \ critical = log = lambda *a, **kw: None log = Logger('My library') Best Practices -------------- - A library that wants to log to the Logbook system should generally be designed to provide an interface to the record dispatchers it is using. That does not have to be a reference to the record dispatcher itself, it is perfectly fine if there is a toggle to switch it on or off. - The channel name should be readable and descriptive. 
- For example, if you are a database library that wants to use the logging system to log all SQL statements issued in debug mode, you can enable and disable your record dispatcher based on that debug flag. - Libraries should never set up log setups except temporarily on a per-thread basis if it never changes the stack for a longer duration than a function call in a library. For example, hooking in a null handler for a call to a noisy function is fine, changing the global stack in a function and not reverting it at the end of the function is bad. Example Setup ------------- Consider how your logger should be configured by default. Users familiar with :mod:`logging` from the standard library probably expect your logger to be disabled by default:: import yourmodule import logbook yourmodule.logger.enable() def main(): ... yourmodule.something() ... if __name__ == '__main__': with logbook.StderrHandler(): main() or set to a high level (e.g. `WARNING`) by default, allowing them to opt in to more detail if desired:: import yourmodule import logbook yourmodule.logger.level = logbook.WARNING def main(): ... yourmodule.something() ... if __name__ == '__main__': with logbook.StderrHandler(): main() Either way, make sure to document how your users can enable your logger, including basic use of logbook handlers. Some users may want to continue using :mod:`logging`, so you may want to link to :class:`~logbook.compat.LoggingHandler`. Multiple Logger Example Setup ----------------------------- You may want to use multiple loggers in your library. It may be worthwhile to add a logger group to allow the level or disabled attributes of all your loggers to be set at once. For example, your library might look something like this: .. code-block:: python :caption: yourmodule/__init__.py from .log import logger_group .. code-block:: python :caption: yourmodule/log.py import logbook logger_group = logbook.LoggerGroup() logger_group.level = logbook.WARNING .. 
code-block:: python :caption: yourmodule/engine.py from logbook import Logger from .log import logger_group logger = Logger('yourmodule.engine') logger_group.add_logger(logger) .. code-block:: python :caption: yourmodule/parser.py from logbook import Logger from .log import logger_group logger = Logger('yourmodule.parser') logger_group.add_logger(logger) The library user can then choose what level of logging they would like from your library:: import logbook import yourmodule yourmodule.logger_group.level = logbook.INFO They might only want to see debug messages from one of the loggers:: import logbook import yourmodule yourmodule.engine.logger.level = logbook.DEBUG Debug Loggers ------------- Sometimes you want to have loggers in place that are only really good for debugging. For example you might have a library that does a lot of server/client communication and for debugging purposes it would be nice if you can enable/disable that log output as necessary. In that case it makes sense to create a logger and disable that by default and give people a way to get hold of the logger to flip the flag. Additionally you can override the :attr:`~logbook.Logger.disabled` flag to automatically set it based on another value:: class MyLogger(Logger): @property def disabled(self): return not database_connection.debug database_connection.logger = MyLogger('mylibrary.dbconnection') logbook-1.5.3/docs/make.bat000066400000000000000000000100141355165376200155220ustar00rootroot00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. 
singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. changes to make an overview over all changed/added/deprecated items echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp echo. 
echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Logbook.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Logbook.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) :end logbook-1.5.3/docs/performance.rst000066400000000000000000000062321355165376200171570ustar00rootroot00000000000000Performance Tuning ================== The more logging calls you add to your application and libraries, the more overhead will you introduce. There are a couple things you can do to remedy this behavior. Debug-Only Logging ------------------ There are debug log calls, and there are debug log calls. 
Some debug log calls would sometimes be interesting in a production environment, others really only if you are on your local machine fiddling around with the code. Logbook internally makes sure to process as little of your logging call as necessary, but it will still have to walk the current stack to figure out if there are any active handlers or not. Depending on the number of handlers on the stack, the kind of handler etc, there will be more or less processed. Generally speaking a not-handled logging call is cheap enough that you don't have to care about it. However there is not only your logging call, there might also be some data you have to process for the record. This will always be processed, even if the log record ends up being discarded. This is where the Python ``__debug__`` feature comes in handy. This variable is a special flag that is evaluated at the time where Python processes your script. It can eliminate code completely from your script so that it does not even exist in the compiled bytecode (requires Python to be run with the ``-O`` switch):: if __debug__: info = get_wallcalculate_debug_info() logger.debug("Call to response() failed. Reason: {0}", info) Keep the Fingers Crossed ------------------------ Do you really need the debug info? In case you find yourself only looking at the logfiles when errors occurred it would be an option to put in the :class:`~logbook.FingersCrossedHandler`. Logging into memory is always cheaper than logging on a filesystem. Keep the Stack Static --------------------- Whenever you do a push or pop from one of the stacks you will invalidate an internal cache that is used by logbook. This is an implementation detail, but this is how it works for the moment. That means that the first logging call after a push or pop will have a higher impact on the performance than following calls. That means you should not attempt to push or pop from a stack for each logging call. Make sure to do the pushing and popping only as needed. 
(start/end of application/request) Disable Introspection --------------------- By default Logbook will try to pull in the interpreter frame of the caller that invoked a logging function. While this is a fast operation that usually does not slow down the execution of your script it also means that for certain Python implementations it invalidates assumptions a JIT compiler might have made of the function body. Currently this for example is the case for applications running on pypy. If you would be using a stock logbook setup on pypy, the JIT wouldn't be able to work properly. In case you don't need the frame based information (name of module, calling function, filename, line number) you can disable the introspection feature:: from logbook import Flags with Flags(introspection=False): # all logging calls here will not use introspection ... logbook-1.5.3/docs/quickstart.rst000066400000000000000000000214251355165376200170510ustar00rootroot00000000000000Quickstart ========== .. currentmodule:: logbook Logbook makes it very easy to get started with logging. Just import the logger class, create yourself a logger and you are set: >>> from logbook import Logger, StreamHandler >>> import sys >>> StreamHandler(sys.stdout).push_application() >>> log = Logger('My Awesome Logger') >>> log.warn('This is too cool for stdlib') [2015-10-05 19:02:03.575723] WARNING: My Awesome Logger: This is too cool for stdlib A logger is a so-called :class:`~logbook.base.RecordDispatcher`, which is commonly referred to as a "logging channel". The name you give such a channel is up to you and need not be unique although it's a good idea to keep it unique so that you can filter by it if you want. The basic interface is similar to what you may already know from the standard library's :mod:`logging` module. There are several logging levels, available as methods on the logger. 
The levels -- and their suggested meaning -- are: * ``critical`` -- for errors that lead to termination * ``error`` -- for errors that occur, but are handled * ``warning`` -- for exceptional circumstances that might not be errors * ``notice`` -- for non-error messages you usually want to see * ``info`` -- for messages you usually don't want to see * ``debug`` -- for debug messages Each of these levels is available as method on the :class:`Logger`. Additionally the ``warning`` level is aliased as :meth:`~Logger.warn`. Alternatively, there is the :meth:`~Logger.log` method that takes the logging level (string or integer) as an argument. Handlers -------- Each call to a logging method creates a log *record* which is then passed to *handlers*, which decide how to store or present the logging info. There are a multitude of available handlers, and of course you can also create your own: * :class:`StreamHandler` for logging to arbitrary streams * :class:`StderrHandler` for logging to stderr * :class:`FileHandler`, :class:`MonitoringFileHandler`, :class:`RotatingFileHandler` and :class:`TimedRotatingFileHandler` for logging to files * :class:`MailHandler` and :class:`GMailHandler` for logging via e-mail * :class:`SyslogHandler` for logging to the syslog daemon * :class:`NTEventLogHandler` for logging to the Windows NT event log On top of those there are a couple of handlers for special use cases: * :class:`logbook.FingersCrossedHandler` for logging into memory and delegating information to another handler when a certain level was exceeded, otherwise discarding all buffered records. * :class:`logbook.more.TaggingHandler` for dispatching log records that are tagged (used in combination with a :class:`logbook.more.TaggingLogger`) * :class:`logbook.queues.ZeroMQHandler` for logging to ZeroMQ * :class:`logbook.queues.RedisHandler` for logging to Redis * :class:`logbook.queues.MultiProcessingHandler` for logging from a child process to a handler from the outer process. 
* :class:`logbook.queues.ThreadedWrapperHandler` for moving the actual handling of a handler into a background thread and using a queue to deliver records to that thread. * :class:`logbook.notifiers.GrowlHandler` and :class:`logbook.notifiers.LibNotifyHandler` for logging to the OS X Growl or the linux notification daemon. * :class:`logbook.notifiers.BoxcarHandler` for logging to `boxcar`_. * :class:`logbook.more.TwitterHandler` for logging to twitter. * :class:`logbook.more.ExternalApplicationHandler` for logging to an external application such as the OS X ``say`` command. * :class:`logbook.ticketing.TicketingHandler` for creating tickets from log records in a database or other data store. .. _boxcar: http://boxcar.io/ Registering Handlers -------------------- So how are handlers registered? If you are used to the standard Python logging system, it works a little bit differently here. Handlers can be registered for a thread or for a whole process or individually for a logger. However, it is strongly recommended not to add handlers to loggers unless there is a very good use case for that. If you want errors to go to syslog, you can set up logging like this:: from logbook import SyslogHandler error_handler = SyslogHandler('logbook example', level='ERROR') with error_handler.applicationbound(): # whatever is executed here and an error is logged to the # error handler ... This will send all errors to the syslog but warnings and lower record levels still to stderr. This is because the handler is not bubbling by default which means that if a record is handled by the handler, it will not bubble up to a higher handler. 
If you want to display all records on stderr, even if they went to the syslog you can enable bubbling by setting *bubble* to ``True``:: from logbook import SyslogHandler error_handler = SyslogHandler('logbook example', level='ERROR', bubble=True) with error_handler.applicationbound(): # whatever is executed here and an error is logged to the # error handler but it will also bubble up to other handlers. ... So what if you want to only log errors to the syslog and nothing to stderr? Then you can combine this with a :class:`NullHandler`:: from logbook import SyslogHandler, NullHandler error_handler = SyslogHandler('logbook example', level='ERROR') null_handler = NullHandler() with null_handler.applicationbound(): with error_handler.applicationbound(): # errors now go to the error_handler and everything else # is swallowed by the null handler so nothing ends up # on the default stderr handler ... Record Processors ----------------- What makes logbook interesting is the ability to automatically process log records. This is handy if you want additional information to be logged for everything you do. A good example use case is recording the IP of the current request in a web application. Or, in a daemon process you might want to log the user and working directory of the process. A context processor can be injected at two places: you can either bind a processor to a stack like you do with handlers or you can override the :meth:`.RecordDispatcher.process_record` method. Here is an example that injects the current working directory into the `extra` dictionary of a log record:: import os from logbook import Processor def inject_cwd(record): record.extra['cwd'] = os.getcwd() with my_handler.applicationbound(): with Processor(inject_cwd).applicationbound(): # everything logged here will have the current working # directory in the log record. ... 
The alternative is to inject information just for one logger in which case you might want to subclass it:: import os class MyLogger(logbook.Logger): def process_record(self, record): logbook.Logger.process_record(self, record) record.extra['cwd'] = os.getcwd() Configuring the Logging Format ------------------------------ All handlers have a useful default log format you don't have to change to use logbook. However if you start injecting custom information into log records, it makes sense to configure the log formatting so that you can see that information. There are two ways to configure formatting: you can either just change the format string or hook in a custom format function. All the handlers that come with logbook and that log into a string use the :class:`~logbook.StringFormatter` by default. Their constructors accept a format string which sets the :attr:`logbook.Handler.format_string` attribute. You can override this attribute in which case a new string formatter is set: >>> from logbook import StderrHandler >>> handler = StderrHandler() >>> handler.format_string = '{record.channel}: {record.message}' >>> handler.formatter Alternatively you can also set a custom format function which is invoked with the record and handler as arguments: >>> def my_formatter(record, handler): ... return record.message ... >>> handler.formatter = my_formatter The format string used for the default string formatter has one variable called `record` available which is the log record itself. All attributes can be looked up using the dotted syntax, and items in the `extra` dict looked up using brackets. Note that if you are accessing an item in the extra dict that does not exist, an empty string is returned. 
Here is an example configuration that shows the current working directory from the example in the previous section:: handler = StderrHandler(format_string= '{record.channel}: {record.message} [{record.extra[cwd]}]') In the :mod:`~logbook.more` module there is a formatter that uses the Jinja2 template engine to format log records, especially useful for multi-line log formatting such as mails (:class:`~logbook.more.JinjaFormatter`). logbook-1.5.3/docs/setups.rst000066400000000000000000000206031355165376200161770ustar00rootroot00000000000000Common Logbook Setups ===================== This part of the documentation shows how you can configure Logbook for different kinds of setups. Desktop Application Setup ------------------------- If you develop a desktop application (command line or GUI), you probably have a line like this in your code:: if __name__ == '__main__': main() This is what you should wrap with a ``with`` statement that sets up your log handler:: from logbook import FileHandler log_handler = FileHandler('application.log') if __name__ == '__main__': with log_handler.applicationbound(): main() Alternatively you can also just push a handler in there:: from logbook import FileHandler log_handler = FileHandler('application.log') log_handler.push_application() if __name__ == '__main__': main() Please keep in mind that you will have to pop the handlers in reverse order if you want to remove them from the stack, so it is recommended to use the context manager API if you plan on reverting the handlers. Web Application Setup --------------------- Typical modern web applications written in Python have two separate contexts where code might be executed: when the code is imported, as well as when a request is handled. The first case is easy to handle, just push a global file handler that writes everything into a file. But Logbook also gives you the ability to improve upon the logging. 
For example, you can easily create yourself a log handler that is used for request-bound logging that also injects additional information. For this you can either subclass the logger or you can bind to the handler with a function that is invoked before logging. The latter has the advantage that it will also be triggered for other logger instances which might be used by a different library. Here is a simple WSGI example application that showcases sending error mails for errors happened during a WSGI application:: from logbook import MailHandler mail_handler = MailHandler('errors@example.com', ['admin@example.com'], format_string=u'''\ Subject: Application Error at {record.extra[url]} Message type: {record.level_name} Location: {record.filename}:{record.lineno} Module: {record.module} Function: {record.func_name} Time: {record.time:%Y-%m-%d %H:%M:%S} Remote IP: {record.extra[ip]} Request: {record.extra[url]} [{record.extra[method]}] Message: {record.message} ''', bubble=True) def application(environ, start_response): request = Request(environ) def inject_info(record, handler): record.extra.update( ip=request.remote_addr, method=request.method, url=request.url ) with mail_handler.threadbound(processor=inject_info): # standard WSGI processing happens here. If an error # is logged, a mail will be sent to the admin on # example.com ... Deeply Nested Setups -------------------- If you want deeply nested logger setups, you can use the :class:`~logbook.NestedSetup` class which simplifies that. 
This is best explained using an example:: import os from logbook import NestedSetup, NullHandler, FileHandler, \ MailHandler, Processor def inject_information(record): record.extra['cwd'] = os.getcwd() # a nested handler setup can be used to configure more complex setups setup = NestedSetup([ # make sure we never bubble up to the stderr handler # if we run out of setup handling NullHandler(), # then write messages that are at least warnings to a logfile FileHandler('application.log', level='WARNING'), # errors should then be delivered by mail and also be kept # in the application log, so we let them bubble up. MailHandler('servererrors@example.com', ['admin@example.com'], level='ERROR', bubble=True), # while we're at it we can push a processor on its own stack to # record additional information. Because processors and handlers # go to different stacks it does not matter if the processor is # added here at the bottom or at the very beginning. Same would # be true for flags. Processor(inject_information) ]) Once such a complex setup is defined, the nested handler setup can be used as if it was a single handler:: with setup.threadbound(): # everything here is handled as specified by the rules above. ... Distributed Logging ------------------- For applications that are spread over multiple processes or even machines logging into a central system can be a pain. Logbook supports ZeroMQ to deal with that. You can set up a :class:`~logbook.queues.ZeroMQHandler` that acts as ZeroMQ publisher and will send log records encoded as JSON over the wire:: from logbook.queues import ZeroMQHandler handler = ZeroMQHandler('tcp://127.0.0.1:5000') Then you just need a separate process that can receive the log records and hand it over to another log handler using the :class:`~logbook.queues.ZeroMQSubscriber`. 
The usual setup is this:: from logbook.queues import ZeroMQSubscriber subscriber = ZeroMQSubscriber('tcp://127.0.0.1:5000') with my_handler: subscriber.dispatch_forever() You can also run that loop in a background thread with :meth:`~logbook.queues.ZeroMQSubscriber.dispatch_in_background`:: from logbook.queues import ZeroMQSubscriber subscriber = ZeroMQSubscriber('tcp://127.0.0.1:5000') subscriber.dispatch_in_background(my_handler) If you just want to use this in a :mod:`multiprocessing` environment you can use the :class:`~logbook.queues.MultiProcessingHandler` and :class:`~logbook.queues.MultiProcessingSubscriber` instead. They work the same way as the ZeroMQ equivalents but are connected through a :class:`multiprocessing.Queue`:: from multiprocessing import Queue from logbook.queues import MultiProcessingHandler, \ MultiProcessingSubscriber queue = Queue(-1) handler = MultiProcessingHandler(queue) subscriber = MultiProcessingSubscriber(queue) There is also the possibility to log into a Redis instance using the :class:`~logbook.queues.RedisHandler`. To do so, you just need to create an instance of this handler as follows:: import logbook from logbook.queues import RedisHandler handler = RedisHandler() l = logbook.Logger() with handler: l.info('Your log message') With the default parameters, this will send a message to redis under the key redis. Redirecting Single Loggers -------------------------- If you want to have a single logger go to another logfile you have two options. First of all you can attach a handler to a specific record dispatcher. So just import the logger and attach something:: from yourapplication.yourmodule import logger logger.handlers.append(MyHandler(...)) Handlers attached directly to a record dispatcher will always take precedence over the stack based handlers. The bubble flag works as expected, so if you have a non-bubbling handler on your logger and it always handles, it will never be passed to other handlers. 
Secondly you can write a handler that looks at the logging channel and only accepts loggers of a specific kind. You can also do that with a filter function:: handler = MyHandler(filter=lambda r, h: r.channel == 'app.database') Keep in mind that the channel is intended to be a human readable string and is not necessarily unique. If you really need to keep loggers apart on a central point you might want to introduce some more meta information into the extra dictionary. You can also compare the dispatcher on the log record:: from yourapplication.yourmodule import logger handler = MyHandler(filter=lambda r, h: r.dispatcher is logger) This however has the disadvantage that the dispatcher entry on the log record is a weak reference and might go away unexpectedly and will not be there if log records are sent to a different process. Last but not least you can check if you can modify the stack around the execution of the code that triggers that logger For instance if the logger you are interested in is used by a specific subsystem, you can modify the stacks before calling into the system. logbook-1.5.3/docs/sheet/000077500000000000000000000000001355165376200152315ustar00rootroot00000000000000logbook-1.5.3/docs/sheet/layout.html000066400000000000000000000016271355165376200174420ustar00rootroot00000000000000{% extends "basic/layout.html" %} {% block extrahead %} {% if online %} {% endif %} {% endblock %} {% block header %}
{% endblock %} {% block footer %} {% if online %} Fork me on GitHub {% endif %} {{ super() }}
{% endblock %} logbook-1.5.3/docs/sheet/static/000077500000000000000000000000001355165376200165205ustar00rootroot00000000000000logbook-1.5.3/docs/sheet/static/background.png000066400000000000000000000065701355165376200213550ustar00rootroot00000000000000PNG  IHDR**C8 tEXtSoftwareAdobe ImageReadyqe< IDATxn> %4MN6~!d$/lÉ/v$\zRU9bepF55qS?yj~ ιfW/=|v?lVʹoONONO,eYeyIЄη~tsBz{:>ɽH~o1* #aydyzG/bl`{ksNuSu; ׷br뺾z LfnzqtDiЯxy4}E IXO kM=H P"-*݆16~y/1K%hbQyQI{@n藸Sr\b/rC9} W^7ۺZ^VGGGE#p+{uMmIl10-hYﺎk^ۆƭ{cP e$Ţ Ԇmq,*MD-& )[g`f'R0~5,CkXÆJ~ _elۖk\^]Zn-9'ǥ##9*+˲mOZ/0&yY$'yX05}>Et:XN~Jy]4f!Zz6ͬ>$IHNOO޼sNUWWWMmUSXͺ[rO>d>_h0ӧϟhü+'O {_FzvzZ}/OHhr|9yS"E+-3N?]|dLGpʛ"'"өv$fV/۶aYF_oꫯ_\^>8>>WFm9bq~~!M&# ׊Nu]ɊHh$MNʵn6>9=z*~l_pʒ#j{?~睳/Ƀٌ䋾`kjCycې%i2ϧ) S n*lEߒ.~a٢Q%pYQ wIR貢Y^y ]Hz'JX\ѯg%9$ib r *^"㻵5+]-]QJoAq,/EҮcW>Qd$pagr̢pL=WᣭQP$XܼK^!Ϩ:$\^ouDZ Q>̊UoYeڍ T4fdY]%=Ͳ[[EU->GBٖ 09nyْ(a1t0SLG?hu7R1ǹlbUA{!5HZ,2}Ed;{$访kt]',aMvmW+F&xbEZNu?6XtϽ!qp\oL1C\&hEf\VҊo;um`A{ WKJ}HcߊM"b"2X뮶V׶-}xr_U4ŒIZOV9~W ΙgK%I2e2^q"u:2Π4_;u 0!}vY!yُ64FEڈU_f(·dW# Ę/40вWC&cEp-'MwI@+vKZԫ4l"4_4k,^fHhӗc,. Lqa>d 7BMLF &ZFO򜓉,˴C]hP&k2/w \7u-i:.}G=YxH*{J"I]5mKӇځBRڵHڶ-RZ="3B;"55CM\oʢ476D$'3r}Y"Ti뻶i4 Xx՜t&L+L`zKVUvj]q_PRf<8c~`2R>#}Ȅ}`c}`F]6w-"^MċiQZš;śխZoj|';^mc_YU3L߻w/p^(ɤhVM%ȓHuMN*g\.9o/Rz޹r\ _7B+"b2t],K4ȢH={[R}߿_/ιRҊ~)R1%ɳ٬TflQq/ Irs yKecͦ6uUVv;L2 =/¤_eYMf$:ۦYK.ֶH"=v%!xq4Jkϟ mCj"/<{ IGwMyvbqg8T{_4fy^27@mѵ^%`;]*^lkۦi}eij0]!~F.Wy/wΦz旗/f IjgKd5PBu4ѭy6LcrNjkMGyMm6Fd)]Q{c\_VjB̦ӉPUfݬV{?>~Wyz&aB=M8&(m煫ȼX|_^4fItmYŜtK=,͕_\:+;9)NE5=Yv-YbqۖI5Ѥ U_+!fⳆKuޙp!ј;M<>Hmi&4}7燇="t&*^:ZO3<㺮?=|5KJ619}O3 KN k±,dp ܡ_}ߩZM"%|C=F4Or4mZc0OT7Ϻڕýo`yA-AD Y.eKQBf!"FnZ3[_ftX´ lս̑}IȊ#aGGG_\1^Cv^cW5i5|  c]?E!~Sxk,a?CwԍY:W3(no+lp+*-"2㊳>> from logbook import TestHandler, Logger >>> logger = Logger('Testing') >>> handler = TestHandler() >>> handler.push_thread() >>> logger.warn('Hello World') >>> handler.records [] >>> handler.formatted_records [u'[WARNING] Testing: Hello World'] .. 
_probe-log-records: Probe Log Records ----------------- The handler also provide some convenience methods to do assertions: >>> handler.has_warnings True >>> handler.has_errors False >>> handler.has_warning('Hello World') True Methods like :meth:`~logbook.TestHandler.has_warning` accept two arguments: `message` If provided and not `None` it will check if there is at least one log record where the message matches. This can also be a compiled regular expression. `channel` If provided and not `None` it will check if there is at least one log record where the logger name of the record matches. Example usage: >>> handler.has_warning('A different message') False >>> handler.has_warning(re.compile('^Hello')) True >>> handler.has_warning('Hello World', channel='Testing') True >>> handler.has_warning(channel='Testing') True logbook-1.5.3/logbook/000077500000000000000000000000001355165376200146255ustar00rootroot00000000000000logbook-1.5.3/logbook/__init__.py000066400000000000000000000033541355165376200167430ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ logbook ~~~~~~~ Simple logging library that aims to support desktop, command line and web applications alike. :copyright: (c) 2010 by Armin Ronacher, Georg Brandl. :license: BSD, see LICENSE for more details. """ import os from .base import ( LogRecord, Logger, LoggerGroup, NestedSetup, Processor, Flags, get_level_name, lookup_level, dispatch_record, CRITICAL, ERROR, WARNING, NOTICE, INFO, DEBUG, TRACE, NOTSET, set_datetime_format) from .handlers import ( Handler, StreamHandler, FileHandler, MonitoringFileHandler, StderrHandler, RotatingFileHandler, TimedRotatingFileHandler, TestHandler, MailHandler, GMailHandler, SyslogHandler, NullHandler, NTEventLogHandler, create_syshandler, StringFormatter, StringFormatterHandlerMixin, HashingHandlerMixin, LimitingHandlerMixin, WrapperHandler, FingersCrossedHandler, GroupHandler, GZIPCompressionHandler, BrotliCompressionHandler) from . 
import compat # create an anonymous default logger and provide all important # methods of that logger as global functions _default_logger = Logger('Generic') _default_logger.suppress_dispatcher = True trace = _default_logger.trace debug = _default_logger.debug info = _default_logger.info warn = _default_logger.warn warning = _default_logger.warning notice = _default_logger.notice error = _default_logger.error exception = _default_logger.exception catch_exceptions = _default_logger.catch_exceptions critical = _default_logger.critical log = _default_logger.log del _default_logger # install a default global handler if os.environ.get('LOGBOOK_INSTALL_DEFAULT_HANDLER'): default_handler = StderrHandler() default_handler.push_application() from .__version__ import __version__ logbook-1.5.3/logbook/__version__.py000066400000000000000000000000261355165376200174560ustar00rootroot00000000000000__version__ = "1.5.3" logbook-1.5.3/logbook/_fallback.py000066400000000000000000000176601355165376200171070ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ logbook._fallback ~~~~~~~~~~~~~~~~~ Fallback implementations in case speedups is not around. :copyright: (c) 2010 by Armin Ronacher, Georg Brandl. :license: BSD, see LICENSE for more details. """ from itertools import count from logbook.helpers import get_iterator_next_method from logbook.concurrency import ( thread_get_ident, greenlet_get_ident, thread_local, greenlet_local, ThreadLock, GreenletRLock, is_gevent_enabled, ContextVar, context_get_ident, is_context_enabled) _missing = object() _MAX_CONTEXT_OBJECT_CACHE = 256 def group_reflected_property(name, default, fallback=_missing): """Returns a property for a given name that falls back to the value of the group if set. If there is no such group, the provided default is used. 
""" def _get(self): rv = getattr(self, '_' + name, _missing) if rv is not _missing and rv != fallback: return rv if self.group is None: return default return getattr(self.group, name) def _set(self, value): setattr(self, '_' + name, value) def _del(self): delattr(self, '_' + name) return property(_get, _set, _del) class _StackBound(object): def __init__(self, obj, push, pop): self.__obj = obj self.__push = push self.__pop = pop def __enter__(self): self.__push() return self.__obj def __exit__(self, exc_type, exc_value, tb): self.__pop() class StackedObject(object): """Baseclass for all objects that provide stack manipulation operations. """ def push_greenlet(self): """Pushes the stacked object to the greenlet stack.""" raise NotImplementedError() def pop_greenlet(self): """Pops the stacked object from the greenlet stack.""" raise NotImplementedError() def push_context(self): """Pushes the stacked object to the context stack.""" raise NotImplementedError() def pop_context(self): """Pops the stacked object from the context stack.""" raise NotImplementedError() def push_thread(self): """Pushes the stacked object to the thread stack.""" raise NotImplementedError() def pop_thread(self): """Pops the stacked object from the thread stack.""" raise NotImplementedError() def push_application(self): """Pushes the stacked object to the application stack.""" raise NotImplementedError() def pop_application(self): """Pops the stacked object from the application stack.""" raise NotImplementedError() def __enter__(self): if is_gevent_enabled(): self.push_greenlet() else: self.push_thread() return self def __exit__(self, exc_type, exc_value, tb): if is_gevent_enabled(): self.pop_greenlet() else: self.pop_thread() def greenletbound(self, _cls=_StackBound): """Can be used in combination with the `with` statement to execute code while the object is bound to the greenlet. 
""" return _cls(self, self.push_greenlet, self.pop_greenlet) def contextbound(self, _cls=_StackBound): """Can be used in combination with the `with` statement to execute code while the object is bound to the concurrent context. """ return _cls(self, self.push_context, self.pop_context) def threadbound(self, _cls=_StackBound): """Can be used in combination with the `with` statement to execute code while the object is bound to the thread. """ return _cls(self, self.push_thread, self.pop_thread) def applicationbound(self, _cls=_StackBound): """Can be used in combination with the `with` statement to execute code while the object is bound to the application. """ return _cls(self, self.push_application, self.pop_application) class ContextStackManager(object): """Helper class for context objects that manages a stack of objects. """ def __init__(self): self._global = [] self._thread_context_lock = ThreadLock() self._thread_context = thread_local() self._greenlet_context_lock = GreenletRLock() self._greenlet_context = greenlet_local() self._context_stack = ContextVar('stack') self._cache = {} self._stackop = get_iterator_next_method(count()) def iter_context_objects(self): """Returns an iterator over all objects for the combined application and context cache. 
""" use_gevent = is_gevent_enabled() use_context = is_context_enabled() if use_gevent: tid = greenlet_get_ident() elif use_context: tid = context_get_ident() else: tid = thread_get_ident() objects = self._cache.get(tid) if objects is None: if len(self._cache) > _MAX_CONTEXT_OBJECT_CACHE: self._cache.clear() objects = self._global[:] objects.extend(getattr(self._thread_context, 'stack', ())) if use_gevent: objects.extend(getattr(self._greenlet_context, 'stack', ())) if use_context: objects.extend(self._context_stack.get([])) objects.sort(reverse=True) objects = [x[1] for x in objects] self._cache[tid] = objects return iter(objects) def push_greenlet(self, obj): self._greenlet_context_lock.acquire() try: # remote chance to conflict with thread ids self._cache.pop(greenlet_get_ident(), None) item = (self._stackop(), obj) stack = getattr(self._greenlet_context, 'stack', None) if stack is None: self._greenlet_context.stack = [item] else: stack.append(item) finally: self._greenlet_context_lock.release() def pop_greenlet(self): self._greenlet_context_lock.acquire() try: # remote chance to conflict with thread ids self._cache.pop(greenlet_get_ident(), None) stack = getattr(self._greenlet_context, 'stack', None) assert stack, 'no objects on stack' return stack.pop()[1] finally: self._greenlet_context_lock.release() def push_context(self, obj): self._cache.pop(context_get_ident(), None) item = (self._stackop(), obj) stack = self._context_stack.get(None) if stack is None: stack = [item] self._context_stack.set(stack) else: stack.append(item) def pop_context(self): self._cache.pop(context_get_ident(), None) stack = self._context_stack.get(None) assert stack, 'no objects on stack' return stack.pop()[1] def push_thread(self, obj): self._thread_context_lock.acquire() try: self._cache.pop(thread_get_ident(), None) item = (self._stackop(), obj) stack = getattr(self._thread_context, 'stack', None) if stack is None: self._thread_context.stack = [item] else: stack.append(item) 
finally: self._thread_context_lock.release() def pop_thread(self): self._thread_context_lock.acquire() try: self._cache.pop(thread_get_ident(), None) stack = getattr(self._thread_context, 'stack', None) assert stack, 'no objects on stack' return stack.pop()[1] finally: self._thread_context_lock.release() def push_application(self, obj): self._global.append((self._stackop(), obj)) self._cache.clear() def pop_application(self): assert self._global, 'no objects on application stack' popped = self._global.pop()[1] self._cache.clear() return popped logbook-1.5.3/logbook/_speedups.pyx000066400000000000000000000224101355165376200173550ustar00rootroot00000000000000# -*- coding: utf-8 -*- # cython: language_level=2 """ logbook._speedups ~~~~~~~~~~~~~~~~~ Cython implementation of some core objects. :copyright: (c) 2010 by Armin Ronacher, Georg Brandl. :license: BSD, see LICENSE for more details. """ from logbook.concurrency import (is_gevent_enabled, thread_get_ident, greenlet_get_ident, thread_local, GreenletRLock, greenlet_local, ContextVar, context_get_ident, is_context_enabled) from cpython.dict cimport PyDict_Clear, PyDict_SetItem from cpython.list cimport PyList_Append, PyList_Sort, PyList_GET_SIZE from cpython.pythread cimport PyThread_type_lock, PyThread_allocate_lock, \ PyThread_release_lock, PyThread_acquire_lock, WAIT_LOCK _missing = object() cdef enum: _MAX_CONTEXT_OBJECT_CACHE = 256 cdef class group_reflected_property: cdef object name cdef object _name cdef object default cdef object fallback def __init__(self, name, object default, object fallback=_missing): self.name = name self._name = '_' + name self.default = default self.fallback = fallback def __get__(self, obj, type): if obj is None: return self rv = getattr(obj, self._name, _missing) if rv is not _missing and rv != self.fallback: return rv if obj.group is None: return self.default return getattr(obj.group, self.name) def __set__(self, obj, value): setattr(obj, self._name, value) def __del__(self, obj): 
delattr(obj, self._name) cdef class _StackItem: cdef int id cdef readonly object val def __init__(self, int id, object val): self.id = id self.val = val def __richcmp__(_StackItem self, _StackItem other, int op): cdef int diff = other.id - self.id # preserving older code if op == 0: # < return diff < 0 if op == 1: # <= return diff <= 0 if op == 2: # == return diff == 0 if op == 3: # != return diff != 0 if op == 4: # > return diff > 0 if op == 5: # >= return diff >= 0 assert False, "should never get here" cdef class _StackBound: cdef object obj cdef object push_func cdef object pop_func def __init__(self, obj, push, pop): self.obj = obj self.push_func = push self.pop_func = pop def __enter__(self): self.push_func() return self.obj def __exit__(self, exc_type, exc_value, tb): self.pop_func() cdef class StackedObject: """Base class for all objects that provide stack manipulation operations. """ cpdef push_context(self): """Pushes the stacked object to the asyncio (via contextvar) stack.""" raise NotImplementedError() cpdef pop_context(self): """Pops the stacked object from the asyncio (via contextvar) stack.""" raise NotImplementedError() cpdef push_greenlet(self): """Pushes the stacked object to the greenlet stack.""" raise NotImplementedError() cpdef pop_greenlet(self): """Pops the stacked object from the greenlet stack.""" raise NotImplementedError() cpdef push_thread(self): """Pushes the stacked object to the thread stack.""" raise NotImplementedError() cpdef pop_thread(self): """Pops the stacked object from the thread stack.""" raise NotImplementedError() cpdef push_application(self): """Pushes the stacked object to the application stack.""" raise NotImplementedError() cpdef pop_application(self): """Pops the stacked object from the application stack.""" raise NotImplementedError() def __enter__(self): if is_gevent_enabled(): self.push_greenlet() else: self.push_thread() return self def __exit__(self, exc_type, exc_value, tb): if is_gevent_enabled(): 
self.pop_greenlet() else: self.pop_thread() cpdef greenletbound(self): """Can be used in combination with the `with` statement to execute code while the object is bound to the greenlet. """ return _StackBound(self, self.push_greenlet, self.pop_greenlet) cpdef threadbound(self): """Can be used in combination with the `with` statement to execute code while the object is bound to the thread. """ return _StackBound(self, self.push_thread, self.pop_thread) cpdef applicationbound(self): """Can be used in combination with the `with` statement to execute code while the object is bound to the application. """ return _StackBound(self, self.push_application, self.pop_application) cpdef contextbound(self): """Can be used in combination with the `with` statement to execute code while the object is bound to the asyncio context. """ return _StackBound(self, self.push_context, self.pop_context) cdef class ContextStackManager: cdef list _global cdef PyThread_type_lock _thread_context_lock cdef object _thread_context cdef object _greenlet_context_lock cdef object _greenlet_context cdef object _context_stack cdef dict _cache cdef int _stackcnt def __init__(self): self._global = [] self._thread_context_lock = PyThread_allocate_lock() self._thread_context = thread_local() self._greenlet_context_lock = GreenletRLock() self._greenlet_context = greenlet_local() self._context_stack = ContextVar('stack') self._cache = {} self._stackcnt = 0 cdef _stackop(self): self._stackcnt += 1 return self._stackcnt cpdef iter_context_objects(self): use_gevent = is_gevent_enabled() use_context = is_context_enabled() if use_gevent: tid = greenlet_get_ident() elif use_context: tid = context_get_ident() else: tid = thread_get_ident() objects = self._cache.get(tid) if objects is None: if PyList_GET_SIZE(self._cache) > _MAX_CONTEXT_OBJECT_CACHE: PyDict_Clear(self._cache) objects = self._global[:] objects.extend(getattr(self._thread_context, 'stack', ())) if use_gevent: 
objects.extend(getattr(self._greenlet_context, 'stack', ())) if use_context: objects.extend(self._context_stack.get([])) PyList_Sort(objects) objects = [(<_StackItem>x).val for x in objects] PyDict_SetItem(self._cache, tid, objects) return iter(objects) cpdef push_greenlet(self, obj): self._greenlet_context_lock.acquire() try: self._cache.pop(greenlet_get_ident(), None) item = _StackItem(self._stackop(), obj) stack = getattr(self._greenlet_context, 'stack', None) if stack is None: self._greenlet_context.stack = [item] else: PyList_Append(stack, item) finally: self._greenlet_context_lock.release() cpdef pop_greenlet(self): self._greenlet_context_lock.acquire() try: self._cache.pop(greenlet_get_ident(), None) stack = getattr(self._greenlet_context, 'stack', None) assert stack, 'no objects on stack' return (<_StackItem>stack.pop()).val finally: self._greenlet_context_lock.release() cpdef push_context(self, obj): self._cache.pop(context_get_ident(), None) item = _StackItem(self._stackop(), obj) stack = self._context_stack.get(None) if stack is None: stack = [item] self._context_stack.set(stack) else: PyList_Append(stack, item) cpdef pop_context(self): self._cache.pop(context_get_ident(), None) stack = self._context_stack.get(None) assert stack, 'no objects on stack' return (<_StackItem>stack.pop()).val cpdef push_thread(self, obj): PyThread_acquire_lock(self._thread_context_lock, WAIT_LOCK) try: self._cache.pop(thread_get_ident(), None) item = _StackItem(self._stackop(), obj) stack = getattr(self._thread_context, 'stack', None) if stack is None: self._thread_context.stack = [item] else: PyList_Append(stack, item) finally: PyThread_release_lock(self._thread_context_lock) cpdef pop_thread(self): PyThread_acquire_lock(self._thread_context_lock, WAIT_LOCK) try: self._cache.pop(thread_get_ident(), None) stack = getattr(self._thread_context, 'stack', None) assert stack, 'no objects on stack' return (<_StackItem>stack.pop()).val finally: 
PyThread_release_lock(self._thread_context_lock) cpdef push_application(self, obj): self._global.append(_StackItem(self._stackop(), obj)) PyDict_Clear(self._cache) cpdef pop_application(self): assert self._global, 'no objects on application stack' popped = (<_StackItem>self._global.pop()).val PyDict_Clear(self._cache) return popped logbook-1.5.3/logbook/_termcolors.py000066400000000000000000000021621355165376200175300ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ logbook._termcolors ~~~~~~~~~~~~~~~~~~~ Provides terminal color mappings. :copyright: (c) 2010 by Armin Ronacher, Georg Brandl. :license: BSD, see LICENSE for more details. """ esc = "\x1b[" codes = {"": "", "reset": esc + "39;49;00m"} dark_colors = ["black", "darkred", "darkgreen", "brown", "darkblue", "purple", "teal", "lightgray"] light_colors = ["darkgray", "red", "green", "yellow", "blue", "fuchsia", "turquoise", "white"] x = 30 for d, l in zip(dark_colors, light_colors): codes[d] = esc + "%im" % x codes[l] = esc + "%i;01m" % x x += 1 del d, l, x codes["darkteal"] = codes["turquoise"] codes["darkyellow"] = codes["brown"] codes["fuscia"] = codes["fuchsia"] def _str_to_type(obj, strtype): """Helper for ansiformat and colorize""" if isinstance(obj, type(strtype)): return obj return obj.encode('ascii') def colorize(color_key, text): """Returns an ANSI formatted text with the given color.""" return (_str_to_type(codes[color_key], text) + text + _str_to_type(codes["reset"], text)) logbook-1.5.3/logbook/base.py000066400000000000000000001206001355165376200161100ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ logbook.base ~~~~~~~~~~~~ Base implementation for logbook. :copyright: (c) 2010 by Armin Ronacher, Georg Brandl. :license: BSD, see LICENSE for more details. 
""" import os import sys import traceback from collections import defaultdict from datetime import datetime from itertools import chain from weakref import ref as weakref from logbook.concurrency import (greenlet_get_ident, thread_get_ident, thread_get_name) from logbook.helpers import (PY2, cached_property, integer_types, iteritems, parse_iso8601, string_types, to_safe_json, u, xrange) _has_speedups = False try: if os.environ.get('DISABLE_LOGBOOK_CEXT_AT_RUNTIME'): raise ImportError("Speedups disabled via DISABLE_LOGBOOK_CEXT_AT_RUNTIME") from logbook._speedups import ( _missing, group_reflected_property, ContextStackManager, StackedObject) _has_speedups = True except ImportError: from logbook._fallback import ( _missing, group_reflected_property, ContextStackManager, StackedObject) _datetime_factory = datetime.utcnow def set_datetime_format(datetime_format): """ Set the format for the datetime objects created, which are then made available as the :py:attr:`LogRecord.time` attribute of :py:class:`LogRecord` instances. :param datetime_format: Indicates how to generate datetime objects. Possible values are: "utc" :py:attr:`LogRecord.time` will be a datetime in UTC time zone (but not time zone aware) "local" :py:attr:`LogRecord.time` will be a datetime in local time zone (but not time zone aware) A `callable` returning datetime instances :py:attr:`LogRecord.time` will be a datetime created by :py:obj:`datetime_format` (possibly time zone aware) This function defaults to creating datetime objects in UTC time, using `datetime.utcnow() `_, so that logbook logs all times in UTC time by default. This is recommended in case you have multiple software modules or instances running in different servers in different time zones, as it makes it simple and less error prone to correlate logging across the different servers. 
On the other hand if all your software modules are running in the same time zone and you have to correlate logging with third party modules already logging in local time, it can be more convenient to have logbook logging to local time instead of UTC. Local time logging can be enabled like this:: import logbook from datetime import datetime logbook.set_datetime_format("local") Other uses rely on your supplied :py:obj:`datetime_format`. Using `pytz `_ for example:: from datetime import datetime import logbook import pytz def utc_tz(): return datetime.now(tz=pytz.utc) logbook.set_datetime_format(utc_tz) """ global _datetime_factory if datetime_format == "utc": _datetime_factory = datetime.utcnow elif datetime_format == "local": _datetime_factory = datetime.now elif callable(datetime_format): inst = datetime_format() if not isinstance(inst, datetime): raise ValueError("Invalid callable value, valid callable " "should return datetime.datetime instances, " "not %r" % (type(inst),)) _datetime_factory = datetime_format else: raise ValueError("Invalid value %r. Valid values are 'utc' and " "'local'." % (datetime_format,)) # make sure to sync these up with _speedups.pyx CRITICAL = 15 ERROR = 14 WARNING = 13 NOTICE = 12 INFO = 11 DEBUG = 10 TRACE = 9 NOTSET = 0 _level_names = { CRITICAL: 'CRITICAL', ERROR: 'ERROR', WARNING: 'WARNING', NOTICE: 'NOTICE', INFO: 'INFO', DEBUG: 'DEBUG', TRACE: 'TRACE', NOTSET: 'NOTSET' } _reverse_level_names = dict((v, k) for (k, v) in iteritems(_level_names)) _missing = object() # on python 3 we can savely assume that frame filenames will be in # unicode, on Python 2 we have to apply a trick. if PY2: def _convert_frame_filename(fn): if isinstance(fn, unicode): fn = fn.decode(sys.getfilesystemencoding() or 'utf-8', 'replace') return fn else: def _convert_frame_filename(fn): return fn def level_name_property(): """Returns a property that reflects the level as name from the internal level attribute. 
""" def _get_level_name(self): return get_level_name(self.level) def _set_level_name(self, level): self.level = lookup_level(level) return property(_get_level_name, _set_level_name, doc='The level as unicode string') def lookup_level(level): """Return the integer representation of a logging level.""" if isinstance(level, integer_types): return level try: return _reverse_level_names[level] except KeyError: raise LookupError('unknown level name %s' % level) def get_level_name(level): """Return the textual representation of logging level 'level'.""" try: return _level_names[level] except KeyError: raise LookupError('unknown level') class _ExceptionCatcher(object): """Helper for exception caught blocks.""" def __init__(self, logger, args, kwargs): self.logger = logger self.args = args self.kwargs = kwargs def __enter__(self): return self def __exit__(self, exc_type, exc_value, tb): if exc_type is not None: kwargs = self.kwargs.copy() kwargs['exc_info'] = (exc_type, exc_value, tb) self.logger.exception(*self.args, **kwargs) return True class ContextObject(StackedObject): """An object that can be bound to a context. It is managed by the :class:`ContextStackManager`""" #: subclasses have to instanciate a :class:`ContextStackManager` #: object on this attribute which is then shared for all the #: subclasses of it. 
stack_manager = None def push_greenlet(self): """Pushes the context object to the greenlet stack.""" self.stack_manager.push_greenlet(self) def pop_greenlet(self): """Pops the context object from the stack.""" popped = self.stack_manager.pop_greenlet() assert popped is self, 'popped unexpected object' def push_context(self): """Pushes the context object to the context stack.""" self.stack_manager.push_context(self) def pop_context(self): """Pops the context object from the stack.""" popped = self.stack_manager.pop_context() assert popped is self, 'popped unexpected object' def push_thread(self): """Pushes the context object to the thread stack.""" self.stack_manager.push_thread(self) def pop_thread(self): """Pops the context object from the stack.""" popped = self.stack_manager.pop_thread() assert popped is self, 'popped unexpected object' def push_application(self): """Pushes the context object to the application stack.""" self.stack_manager.push_application(self) def pop_application(self): """Pops the context object from the stack.""" popped = self.stack_manager.pop_application() assert popped is self, 'popped unexpected object' class NestedSetup(StackedObject): """A nested setup can be used to configure multiple handlers and processors at once. 
""" def __init__(self, objects=None): self.objects = list(objects or ()) def push_application(self): for obj in self.objects: obj.push_application() def pop_application(self): for obj in reversed(self.objects): obj.pop_application() def push_thread(self): for obj in self.objects: obj.push_thread() def pop_thread(self): for obj in reversed(self.objects): obj.pop_thread() def push_greenlet(self): for obj in self.objects: obj.push_greenlet() def pop_greenlet(self): for obj in reversed(self.objects): obj.pop_greenlet() def push_context(self): for obj in self.objects: obj.push_context() def pop_context(self): for obj in reversed(self.objects): obj.pop_context() class Processor(ContextObject): """Can be pushed to a stack to inject additional information into a log record as necessary:: def inject_ip(record): record.extra['ip'] = '127.0.0.1' with Processor(inject_ip): ... """ stack_manager = ContextStackManager() def __init__(self, callback=None): #: the callback that was passed to the constructor self.callback = callback def process(self, record): """Called with the log record that should be overridden. The default implementation calls :attr:`callback` if it is not `None`. """ if self.callback is not None: self.callback(record) class _InheritedType(object): __slots__ = () def __repr__(self): return 'Inherit' def __reduce__(self): return 'Inherit' Inherit = _InheritedType() class Flags(ContextObject): """Allows flags to be pushed on a flag stack. Currently two flags are available: `errors` Can be set to override the current error behaviour. This value is used when logging calls fail. 
The default behaviour is spitting out the stacktrace to stderr but this can be overridden: =================== ========================================== ``'silent'`` fail silently ``'raise'`` raise a catchable exception ``'print'`` print the stacktrace to stderr (default) =================== ========================================== `introspection` Can be used to disable frame introspection. This can give a speedup on production systems if you are using a JIT compiled Python interpreter such as pypy. The default is `True`. Note that the default setup of some of the handler (mail for instance) includes frame dependent information which will not be available when introspection is disabled. Example usage:: with Flags(errors='silent'): ... """ stack_manager = ContextStackManager() def __init__(self, **flags): self.__dict__.update(flags) @staticmethod def get_flag(flag, default=None): """Looks up the current value of a specific flag.""" for flags in Flags.stack_manager.iter_context_objects(): val = getattr(flags, flag, Inherit) if val is not Inherit: return val return default def _create_log_record(cls, dict): """Extra function for reduce because on Python 3 unbound methods can no longer be pickled. """ return cls.from_dict(dict) class LogRecord(object): """A LogRecord instance represents an event being logged. LogRecord instances are created every time something is logged. They contain all the information pertinent to the event being logged. The main information passed in is in msg and args """ _pullable_information = frozenset(( 'func_name', 'module', 'filename', 'lineno', 'process_name', 'thread', 'thread_name', 'greenlet', 'formatted_exception', 'message', 'exception_name', 'exception_message' )) _noned_on_close = frozenset(('exc_info', 'frame', 'calling_frame')) #: can be overriden by a handler to not close the record. This could #: lead to memory leaks so it should be used carefully. 
keep_open = False #: the time of the log record creation as :class:`datetime.datetime` #: object. This information is unavailable until the record was #: heavy initialized. time = None #: a flag that is `True` if the log record is heavy initialized which #: is not the case by default. heavy_initialized = False #: a flag that is `True` when heavy initialization is no longer possible late = False #: a flag that is `True` when all the information was pulled from the #: information that becomes unavailable on close. information_pulled = False def __init__(self, channel, level, msg, args=None, kwargs=None, exc_info=None, extra=None, frame=None, dispatcher=None, frame_correction=0): #: the name of the logger that created it or any other textual #: channel description. This is a descriptive name and can be #: used for filtering. self.channel = channel #: The message of the log record as new-style format string. self.msg = msg #: the positional arguments for the format string. self.args = args or () #: the keyword arguments for the format string. self.kwargs = kwargs or {} #: the level of the log record as integer. self.level = level #: optional exception information. If set, this is a tuple in the #: form ``(exc_type, exc_value, tb)`` as returned by #: :func:`sys.exc_info`. #: This parameter can also be ``True``, which would cause the exception #: info tuple to be fetched for you. if not exc_info: # this is a special case where exc_info=False can be passed in # theory, and it should be the same as exc_info=None exc_info = None self.exc_info = exc_info #: optional extra information as dictionary. This is the place #: where custom log processors can attach custom context sensitive #: data. # TODO: Replace the lambda with str when we remove support for python 2 self.extra = defaultdict(lambda: u'', extra or ()) #: If available, optionally the interpreter frame that pulled the #: heavy init. This usually points to somewhere in the dispatcher. 
#: Might not be available for all calls and is removed when the log #: record is closed. self.frame = frame #: A positive integer telling the number of frames to go back from #: the frame which triggered the log entry. This is mainly useful #: for decorators that want to show that the log was emitted from #: form the function they decorate self.frame_correction = frame_correction #: the PID of the current process self.process = None if dispatcher is not None: dispatcher = weakref(dispatcher) self._dispatcher = dispatcher def heavy_init(self): """Does the heavy initialization that could be expensive. This must not be called from a higher stack level than when the log record was created and the later the initialization happens, the more off the date information will be for example. This is internally used by the record dispatching system and usually something not to worry about. """ if self.heavy_initialized: return assert not self.late, 'heavy init is no longer possible' self.heavy_initialized = True self.process = os.getpid() self.time = _datetime_factory() if self.frame is None and Flags.get_flag('introspection', True): self.frame = sys._getframe(1) if self.exc_info is True: self.exc_info = sys.exc_info() def pull_information(self): """A helper function that pulls all frame-related information into the object so that this information is available after the log record was closed. """ if self.information_pulled: return # due to how cached_property is implemented, the attribute access # has the side effect of caching the attribute on the instance of # the class. for key in self._pullable_information: getattr(self, key) self.information_pulled = True def close(self): """Closes the log record. This will set the frame and calling frame to `None` and frame-related information will no longer be available unless it was pulled in first (:meth:`pull_information`). This makes a log record safe for pickling and will clean up memory that might be still referenced by the frames. 
""" for key in self._noned_on_close: setattr(self, key, None) self.late = True def __reduce_ex__(self, protocol): return _create_log_record, (type(self), self.to_dict()) def to_dict(self, json_safe=False): """Exports the log record into a dictionary without the information that cannot be safely serialized like interpreter frames and tracebacks. """ self.pull_information() rv = {} for key, value in iteritems(self.__dict__): if key[:1] != '_' and key not in self._noned_on_close: rv[key] = value # the extra dict is exported as regular dict rv['extra'] = dict(rv['extra']) if json_safe: return to_safe_json(rv) return rv @classmethod def from_dict(cls, d): """Creates a log record from an exported dictionary. This also supports JSON exported dictionaries. """ rv = object.__new__(cls) rv.update_from_dict(d) return rv def update_from_dict(self, d): """Like the :meth:`from_dict` classmethod, but will update the instance in place. Helpful for constructors. """ self.__dict__.update(d) for key in self._noned_on_close: setattr(self, key, None) self._information_pulled = True self._channel = None if isinstance(self.time, string_types): self.time = parse_iso8601(self.time) # TODO: Replace the lambda with str when we remove support for python 2` self.extra = defaultdict(lambda: u'', self.extra) return self def _format_message(self, msg, *args, **kwargs): """Called if the record's message needs to be formatted. Subclasses can implement their own formatting. 
""" return msg.format(*args, **kwargs) @cached_property def message(self): """The formatted message.""" if not (self.args or self.kwargs): return self.msg try: try: return self._format_message(self.msg, *self.args, **self.kwargs) except UnicodeDecodeError: # Assume an unicode message but mixed-up args msg = self.msg.encode('utf-8', 'replace') return self._format_message(msg, *self.args, **self.kwargs) except (UnicodeEncodeError, AttributeError): # we catch AttributeError since if msg is bytes, # it won't have the 'format' method if (sys.exc_info()[0] is AttributeError and (PY2 or not isinstance(self.msg, bytes))): # this is not the case we thought it is... raise # Assume encoded message with unicode args. # The assumption of utf8 as input encoding is just a guess, # but this codepath is unlikely (if the message is a constant # string in the caller's source file) msg = self.msg.decode('utf-8', 'replace') return self._format_message(msg, *self.args, **self.kwargs) except Exception: # this obviously will not give a proper error message if the # information was not pulled and the log record no longer has # access to the frame. But there is not much we can do about # that. e = sys.exc_info()[1] errormsg = ('Could not format message with provided ' 'arguments: {err}\n msg={msg!r}\n ' 'args={args!r} \n kwargs={kwargs!r}.\n' 'Happened in file {file}, line {lineno}').format( err=e, msg=self.msg, args=self.args, kwargs=self.kwargs, file=self.filename, lineno=self.lineno ) if PY2: errormsg = errormsg.encode('utf-8') raise TypeError(errormsg) level_name = level_name_property() @cached_property def calling_frame(self): """The frame in which the record has been created. This only exists for as long the log record is not closed. 
""" frm = self.frame globs = globals() while frm is not None and frm.f_globals is globs: frm = frm.f_back for _ in xrange(self.frame_correction): if frm is None: break frm = frm.f_back return frm @cached_property def func_name(self): """The name of the function that triggered the log call if available. Requires a frame or that :meth:`pull_information` was called before. """ cf = self.calling_frame if cf is not None: return cf.f_code.co_name @cached_property def module(self): """The name of the module that triggered the log call if available. Requires a frame or that :meth:`pull_information` was called before. """ cf = self.calling_frame if cf is not None: return cf.f_globals.get('__name__') @cached_property def filename(self): """The filename of the module in which the record has been created. Requires a frame or that :meth:`pull_information` was called before. """ cf = self.calling_frame if cf is not None: fn = cf.f_code.co_filename if fn[:1] == '<' and fn[-1:] == '>': return fn return _convert_frame_filename(os.path.abspath(fn)) @cached_property def lineno(self): """The line number of the file in which the record has been created. Requires a frame or that :meth:`pull_information` was called before. """ cf = self.calling_frame if cf is not None: return cf.f_lineno @cached_property def greenlet(self): """The ident of the greenlet. This is evaluated late and means that if the log record is passed to another greenlet, :meth:`pull_information` was called in the old greenlet. """ return greenlet_get_ident() @cached_property def thread(self): """The ident of the thread. This is evaluated late and means that if the log record is passed to another thread, :meth:`pull_information` was called in the old thread. """ return thread_get_ident() @cached_property def thread_name(self): """The name of the thread. This is evaluated late and means that if the log record is passed to another thread, :meth:`pull_information` was called in the old thread. 
""" return thread_get_name() @cached_property def process_name(self): """The name of the process in which the record has been created.""" # Errors may occur if multiprocessing has not finished loading # yet - e.g. if a custom import hook causes third-party code # to run when multiprocessing calls import. See issue 8200 # for an example mp = sys.modules.get('multiprocessing') if mp is not None: # pragma: no cover try: return mp.current_process().name except Exception: pass @cached_property def formatted_exception(self): """The formatted exception which caused this record to be created in case there was any. """ if self.exc_info is not None and self.exc_info != (None, None, None): rv = ''.join(traceback.format_exception(*self.exc_info)) if PY2: rv = rv.decode('utf-8', 'replace') return rv.rstrip() @cached_property def exception_name(self): """The name of the exception.""" if self.exc_info is not None: cls = self.exc_info[0] return u(cls.__module__ + '.' + cls.__name__) @property def exception_shortname(self): """An abbreviated exception name (no import path)""" return self.exception_name.rsplit('.')[-1] @cached_property def exception_message(self): """The message of the exception.""" if self.exc_info is not None: val = self.exc_info[1] try: if PY2: return unicode(val) else: return str(val) except UnicodeError: return str(val).decode('utf-8', 'replace') @property def dispatcher(self): """The dispatcher that created the log record. Might not exist because a log record does not have to be created from a logger or other dispatcher to be handled by logbook. If this is set, it will point to an object that implements the :class:`~logbook.base.RecordDispatcher` interface. """ if self._dispatcher is not None: return self._dispatcher() class LoggerMixin(object): """This mixin class defines and implements the "usual" logger interface (i.e. the descriptive logging functions). 
Classes using this mixin have to implement a :meth:`!handle` method which takes a :class:`~logbook.LogRecord` and passes it along. """ #: The name of the minimium logging level required for records to be #: created. level_name = level_name_property() def trace(self, *args, **kwargs): """Logs a :class:`~logbook.LogRecord` with the level set to :data:`~logbook.TRACE`. """ if not self.disabled and TRACE >= self.level: self._log(TRACE, args, kwargs) def debug(self, *args, **kwargs): """Logs a :class:`~logbook.LogRecord` with the level set to :data:`~logbook.DEBUG`. """ if not self.disabled and DEBUG >= self.level: self._log(DEBUG, args, kwargs) def info(self, *args, **kwargs): """Logs a :class:`~logbook.LogRecord` with the level set to :data:`~logbook.INFO`. """ if not self.disabled and INFO >= self.level: self._log(INFO, args, kwargs) def warn(self, *args, **kwargs): """Logs a :class:`~logbook.LogRecord` with the level set to :data:`~logbook.WARNING`. This function has an alias named :meth:`warning`. """ if not self.disabled and WARNING >= self.level: self._log(WARNING, args, kwargs) def warning(self, *args, **kwargs): """Alias for :meth:`warn`.""" return self.warn(*args, **kwargs) def notice(self, *args, **kwargs): """Logs a :class:`~logbook.LogRecord` with the level set to :data:`~logbook.NOTICE`. """ if not self.disabled and NOTICE >= self.level: self._log(NOTICE, args, kwargs) def error(self, *args, **kwargs): """Logs a :class:`~logbook.LogRecord` with the level set to :data:`~logbook.ERROR`. """ if not self.disabled and ERROR >= self.level: self._log(ERROR, args, kwargs) def exception(self, *args, **kwargs): """Works exactly like :meth:`error` just that the message is optional and exception information is recorded. 
""" if self.disabled or ERROR < self.level: return if not args: args = ('Uncaught exception occurred',) if 'exc_info' not in kwargs: exc_info = sys.exc_info() assert exc_info[0] is not None, 'no exception occurred' kwargs.setdefault('exc_info', sys.exc_info()) return self.error(*args, **kwargs) def critical(self, *args, **kwargs): """Logs a :class:`~logbook.LogRecord` with the level set to :data:`~logbook.CRITICAL`. """ if not self.disabled and CRITICAL >= self.level: self._log(CRITICAL, args, kwargs) def log(self, level, *args, **kwargs): """Logs a :class:`~logbook.LogRecord` with the level set to the `level` parameter. Because custom levels are not supported by logbook, this method is mainly used to avoid the use of reflection (e.g.: :func:`getattr`) for programmatic logging. """ level = lookup_level(level) if level >= self.level: self._log(level, args, kwargs) def catch_exceptions(self, *args, **kwargs): """A context manager that catches exceptions and calls :meth:`exception` for exceptions caught that way. Example: .. code-block:: python with logger.catch_exceptions(): execute_code_that_might_fail() """ if not args: args = ('Uncaught exception occurred',) return _ExceptionCatcher(self, args, kwargs) def enable(self): """Convenience method to enable this logger. :raises AttributeError: The disabled property is read-only, typically because it was overridden in a subclass. .. versionadded:: 1.0 """ try: self.disabled = False except AttributeError: raise AttributeError('The disabled property is read-only.') def disable(self): """Convenience method to disable this logger. :raises AttributeError: The disabled property is read-only, typically because it was overridden in a subclass. .. 
versionadded:: 1.0 """ try: self.disabled = True except AttributeError: raise AttributeError('The disabled property is read-only.') def _log(self, level, args, kwargs): exc_info = kwargs.pop('exc_info', None) extra = kwargs.pop('extra', None) frame_correction = kwargs.pop('frame_correction', 0) self.make_record_and_handle(level, args[0], args[1:], kwargs, exc_info, extra, frame_correction) class RecordDispatcher(object): """A record dispatcher is the internal base class that implements the logic used by the :class:`~logbook.Logger`. """ #: If this is set to `True` the dispatcher information will be suppressed #: for log records emitted from this logger. suppress_dispatcher = False def __init__(self, name=None, level=NOTSET): #: the name of the record dispatcher self.name = name #: list of handlers specific for this record dispatcher self.handlers = [] #: optionally the name of the group this logger belongs to self.group = None #: the level of the record dispatcher as integer self.level = level disabled = group_reflected_property('disabled', False) level = group_reflected_property('level', NOTSET, fallback=NOTSET) def handle(self, record): """Call the handlers for the specified record. This is invoked automatically when a record should be handled. The default implementation checks if the dispatcher is disabled and if the record level is greater than the level of the record dispatcher. In that case it will call the handlers (:meth:`call_handlers`). """ if not self.disabled and record.level >= self.level: self.call_handlers(record) def make_record_and_handle(self, level, msg, args, kwargs, exc_info, extra, frame_correction): """Creates a record from some given arguments and heads it over to the handling system. """ # The channel information can be useful for some use cases which is # why we keep it on there. The log record however internally will # only store a weak reference to the channel, so it might disappear # from one instruction to the other. 
It will also disappear when # a log record is transmitted to another process etc. channel = None if not self.suppress_dispatcher: channel = self record = LogRecord(self.name, level, msg, args, kwargs, exc_info, extra, None, channel, frame_correction) # after handling the log record is closed which will remove some # referenes that would require a GC run on cpython. This includes # the current stack frame, exception information. However there are # some use cases in keeping the records open for a little longer. # For example the test handler keeps log records open until the # test handler is closed to allow assertions based on stack frames # and exception information. try: self.handle(record) finally: record.late = True if not record.keep_open: record.close() def call_handlers(self, record): """Pass a record to all relevant handlers in the following order: - per-dispatcher handlers are handled first - afterwards all the current context handlers in the order they were pushed Before the first handler is invoked, the record is processed (:meth:`process_record`). """ # for performance reasons records are only heavy initialized # and processed if at least one of the handlers has a higher # level than the record and that handler is not a black hole. record_initialized = False # Both logger attached handlers as well as context specific # handlers are handled one after another. The latter also # include global handlers. for handler in chain(self.handlers, Handler.stack_manager.iter_context_objects()): # skip records that this handler is not interested in based # on the record and handler level or in case this method was # overridden on some custom logic. if not handler.should_handle(record): continue # first case of blackhole (without filter). # this should discard all further processing and # we don't have to heavy_init to know that... if handler.filter is None and handler.blackhole: break # we are about to handle the record. 
If it was not yet # processed by context-specific record processors we # have to do that now and remeber that we processed # the record already. if not record_initialized: record.heavy_init() self.process_record(record) record_initialized = True # a filter can still veto the handling of the record. This # however is already operating on an initialized and processed # record. The impact is that filters are slower than the # handler's should_handle function in case there is no default # handler that would handle the record (delayed init). if (handler.filter is not None and not handler.filter(record, handler)): continue # We might have a filter, so now that we know we *should* handle # this record, we should consider the case of us being a black hole... if handler.blackhole: break # handle the record. If the record was handled and # the record is not bubbling we can abort now. if handler.handle(record) and not handler.bubble: break def process_record(self, record): """Processes the record with all context specific processors. This can be overriden to also inject additional information as necessary that can be provided by this record dispatcher. """ if self.group is not None: self.group.process_record(record) for processor in Processor.stack_manager.iter_context_objects(): processor.process(record) class Logger(RecordDispatcher, LoggerMixin): """Instances of the Logger class represent a single logging channel. A "logging channel" indicates an area of an application. Exactly how an "area" is defined is up to the application developer. Names used by logbook should be descriptive and are intended for user display, not for filtering. Filtering should happen based on the context information instead. A logger internally is a subclass of a :class:`~logbook.base.RecordDispatcher` that implements the actual logic. If you want to implement a custom logger class, have a look at the interface of that class as well. 
""" class LoggerGroup(object): """A LoggerGroup represents a group of loggers. It cannot emit log messages on its own but it can be used to set the disabled flag and log level of all loggers in the group. Furthermore the :meth:`process_record` method of the group is called by any logger in the group which by default calls into the :attr:`processor` callback function. """ def __init__(self, loggers=None, level=NOTSET, processor=None): #: a list of all loggers on the logger group. Use the #: :meth:`add_logger` and :meth:`remove_logger` methods to add #: or remove loggers from this list. self.loggers = [] if loggers is not None: for logger in loggers: self.add_logger(logger) #: the level of the group. This is reflected to the loggers #: in the group unless they overrode the setting. self.level = lookup_level(level) #: the disabled flag for all loggers in the group, unless #: the loggers overrode the setting. self.disabled = False #: an optional callback function that is executed to process #: the log records of all loggers in the group. self.processor = processor def add_logger(self, logger): """Adds a logger to this group.""" assert logger.group is None, 'Logger already belongs to a group' logger.group = self self.loggers.append(logger) def remove_logger(self, logger): """Removes a logger from the group.""" self.loggers.remove(logger) logger.group = None def process_record(self, record): """Like :meth:`Logger.process_record` but for all loggers in the group. By default this calls into the :attr:`processor` function is it's not `None`. """ if self.processor is not None: self.processor(record) def enable(self, force=False): """Convenience method to enable this group. :param force: Force enable loggers that were explicitly set. :raises AttributeError: If ``force=True`` and the disabled property of a logger is read-only, typically because it was overridden in a subclass. .. 
versionadded:: 1.0 """ self.disabled = False if force: for logger in self.loggers: rv = getattr(logger, '_disabled', _missing) if rv is not _missing: logger.enable() def disable(self, force=False): """Convenience method to disable this group. :param force: Force disable loggers that were explicitly set. :raises AttributeError: If ``force=True`` and the disabled property of a logger is read-only, typically because it was overridden in a subclass. .. versionadded:: 1.0 """ self.disabled = True if force: for logger in self.loggers: rv = getattr(logger, '_disabled', _missing) if rv is not _missing: logger.disable() _default_dispatcher = RecordDispatcher() def dispatch_record(record): """Passes a record on to the handlers on the stack. This is useful when log records are created programmatically and already have all the information attached and should be dispatched independent of a logger. """ _default_dispatcher.call_handlers(record) # at that point we are safe to import handler from logbook.handlers import Handler # isort:skip logbook-1.5.3/logbook/compat.py000066400000000000000000000242111355165376200164620ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ logbook.compat ~~~~~~~~~~~~~~ Backwards compatibility with stdlib's logging package and the warnings module. :copyright: (c) 2010 by Armin Ronacher, Georg Brandl. :license: BSD, see LICENSE for more details. """ import collections import logging import sys import warnings from datetime import date, datetime import logbook from logbook.helpers import u, string_types, iteritems, collections_abc _epoch_ord = date(1970, 1, 1).toordinal() def redirect_logging(set_root_logger_level=True): """Permanently redirects logging to the stdlib. This also removes all otherwise registered handlers on root logger of the logging system but leaves the other loggers untouched. 
:param set_root_logger_level: controls of the default level of the legacy root logger is changed so that all legacy log messages get redirected to Logbook """ del logging.root.handlers[:] logging.root.addHandler(RedirectLoggingHandler()) if set_root_logger_level: logging.root.setLevel(logging.DEBUG) class redirected_logging(object): """Temporarily redirects logging for all threads and reverts it later to the old handlers. Mainly used by the internal unittests:: from logbook.compat import redirected_logging with redirected_logging(): ... """ def __init__(self, set_root_logger_level=True): self.old_handlers = logging.root.handlers[:] self.old_level = logging.root.level self.set_root_logger_level = set_root_logger_level def start(self): redirect_logging(self.set_root_logger_level) def end(self, etype=None, evalue=None, tb=None): logging.root.handlers[:] = self.old_handlers logging.root.setLevel(self.old_level) __enter__ = start __exit__ = end class LoggingCompatRecord(logbook.LogRecord): def _format_message(self, msg, *args, **kwargs): if kwargs: assert not args return msg % kwargs else: assert not kwargs return msg % tuple(args) class RedirectLoggingHandler(logging.Handler): """A handler for the stdlib's logging system that redirects transparently to logbook. This is used by the :func:`redirect_logging` and :func:`redirected_logging` functions. If you want to customize the redirecting you can subclass it. """ def __init__(self): logging.Handler.__init__(self) def convert_level(self, level): """Converts a logging level into a logbook level.""" if level >= logging.CRITICAL: return logbook.CRITICAL if level >= logging.ERROR: return logbook.ERROR if level >= logging.WARNING: return logbook.WARNING if level >= logging.INFO: return logbook.INFO return logbook.DEBUG def find_extra(self, old_record): """Tries to find custom data from the old logging record. The return value is a dictionary that is merged with the log record extra dictionaries. 
""" rv = vars(old_record).copy() for key in ('name', 'msg', 'args', 'levelname', 'levelno', 'pathname', 'filename', 'module', 'exc_info', 'exc_text', 'lineno', 'funcName', 'created', 'msecs', 'relativeCreated', 'thread', 'threadName', 'greenlet', 'processName', 'process'): rv.pop(key, None) return rv def find_caller(self, old_record): """Tries to find the caller that issued the call.""" frm = sys._getframe(2) while frm is not None: if (frm.f_globals is globals() or frm.f_globals is logbook.base.__dict__ or frm.f_globals is logging.__dict__): frm = frm.f_back else: return frm def convert_time(self, timestamp): """Converts the UNIX timestamp of the old record into a datetime object as used by logbook. """ return datetime.utcfromtimestamp(timestamp) def convert_record(self, old_record): """Converts an old logging record into a logbook log record.""" args = old_record.args kwargs = None # Logging allows passing a mapping object, in which case args will be a mapping. if isinstance(args, collections_abc.Mapping): kwargs = args args = None record = LoggingCompatRecord(old_record.name, self.convert_level(old_record.levelno), old_record.msg, args, kwargs, old_record.exc_info, self.find_extra(old_record), self.find_caller(old_record)) record.time = self.convert_time(old_record.created) return record def emit(self, record): logbook.dispatch_record(self.convert_record(record)) class LoggingHandler(logbook.Handler): """Does the opposite of the :class:`RedirectLoggingHandler`, it sends messages from logbook to logging. Because of that, it's a very bad idea to configure both. This handler is for logbook and will pass stuff over to a logger from the standard library. 
Example usage:: from logbook.compat import LoggingHandler, warn with LoggingHandler(): warn('This goes to logging') """ def __init__(self, logger=None, level=logbook.NOTSET, filter=None, bubble=False): logbook.Handler.__init__(self, level, filter, bubble) if logger is None: logger = logging.getLogger() elif isinstance(logger, string_types): logger = logging.getLogger(logger) self.logger = logger def get_logger(self, record): """Returns the logger to use for this record. This implementation always return :attr:`logger`. """ return self.logger def convert_level(self, level): """Converts a logbook level into a logging level.""" if level >= logbook.CRITICAL: return logging.CRITICAL if level >= logbook.ERROR: return logging.ERROR if level >= logbook.WARNING: return logging.WARNING if level >= logbook.INFO: return logging.INFO return logging.DEBUG def convert_time(self, dt): """Converts a datetime object into a timestamp.""" year, month, day, hour, minute, second = dt.utctimetuple()[:6] days = date(year, month, 1).toordinal() - _epoch_ord + day - 1 hours = days * 24 + hour minutes = hours * 60 + minute seconds = minutes * 60 + second return seconds def convert_record(self, old_record): """Converts a record from logbook to logging.""" if sys.version_info >= (2, 5): # make sure 2to3 does not screw this up optional_kwargs = {'func': getattr(old_record, 'func_name')} else: optional_kwargs = {} record = logging.LogRecord(old_record.channel, self.convert_level(old_record.level), old_record.filename, old_record.lineno, old_record.message, (), old_record.exc_info, **optional_kwargs) for key, value in iteritems(old_record.extra): record.__dict__.setdefault(key, value) record.created = self.convert_time(old_record.time) return record def emit(self, record): self.get_logger(record).handle(self.convert_record(record)) def redirect_warnings(): """Like :func:`redirected_warnings` but will redirect all warnings to the shutdown of the interpreter: .. 
code-block:: python from logbook.compat import redirect_warnings redirect_warnings() """ redirected_warnings().__enter__() class redirected_warnings(object): """A context manager that copies and restores the warnings filter upon exiting the context, and logs warnings using the logbook system. The :attr:`~logbook.LogRecord.channel` attribute of the log record will be the import name of the warning. Example usage: .. code-block:: python from logbook.compat import redirected_warnings from warnings import warn with redirected_warnings(): warn(DeprecationWarning('logging should be deprecated')) """ def __init__(self): self._entered = False def message_to_unicode(self, message): try: return u(str(message)) except UnicodeError: return str(message).decode('utf-8', 'replace') def make_record(self, message, exception, filename, lineno): category = exception.__name__ if exception.__module__ not in ('exceptions', 'builtins'): category = exception.__module__ + '.' + category rv = logbook.LogRecord(category, logbook.WARNING, message) # we don't know the caller, but we get that information from the # warning system. Just attach them. 
rv.filename = filename rv.lineno = lineno return rv def start(self): if self._entered: # pragma: no cover raise RuntimeError("Cannot enter %r twice" % self) self._entered = True self._filters = warnings.filters warnings.filters = self._filters[:] self._showwarning = warnings.showwarning def showwarning(message, category, filename, lineno, file=None, line=None): message = self.message_to_unicode(message) record = self.make_record(message, category, filename, lineno) logbook.dispatch_record(record) warnings.showwarning = showwarning def end(self, etype=None, evalue=None, tb=None): if not self._entered: # pragma: no cover raise RuntimeError("Cannot exit %r without entering first" % self) warnings.filters = self._filters warnings.showwarning = self._showwarning __enter__ = start __exit__ = end logbook-1.5.3/logbook/concurrency.py000066400000000000000000000141611355165376200175340ustar00rootroot00000000000000has_gevent = True use_gevent = False try: import gevent def enable_gevent(): global use_gevent use_gevent = True def _disable_gevent(): # for testing global use_gevent use_gevent = False def is_gevent_enabled(): global use_gevent return use_gevent except ImportError: has_gevent = False def enable_gevent(): pass def _disable_gevent(): pass def is_gevent_enabled(): return False if has_gevent: from gevent.monkey import get_original as _get_original ThreadLock = _get_original('threading', 'Lock') ThreadRLock = _get_original('threading', 'RLock') try: thread_get_ident = _get_original('threading', 'get_ident') except AttributeError: # In 2.7, this is called _get_ident thread_get_ident = _get_original('threading', '_get_ident') thread_local = _get_original('threading', 'local') from gevent.thread import get_ident as greenlet_get_ident from gevent.local import local as greenlet_local from gevent.lock import BoundedSemaphore from gevent.threading import __threading__ def thread_get_name(): return __threading__.currentThread().getName() class GreenletRLock(object): def 
__init__(self): self._thread_local = thread_local() self._owner = None self._wait_queue = [] self._count = 0 def __repr__(self): owner = self._owner return "<%s owner=%r count=%d>" % (self.__class__.__name__, owner, self._count) def acquire(self, blocking=1): tid = thread_get_ident() gid = greenlet_get_ident() tid_gid = (tid, gid) # We trust the GIL here so we can do this comparison w/o locking. if tid_gid == self._owner: self._count += 1 return True greenlet_lock = self._get_greenlet_lock() self._wait_queue.append(gid) # this is a safety in case an exception is raised somewhere # and we must make sure we're not in the queue # otherwise it'll get stuck forever. remove_from_queue_on_return = True try: while True: if not greenlet_lock.acquire(blocking): return False # non-blocking and failed to acquire lock if self._wait_queue[0] == gid: # Hurray, we can have the lock. self._owner = tid_gid self._count = 1 # don't remove us from the queue remove_from_queue_on_return = False return True else: # we already hold the greenlet lock so obviously # the owner is not in our thread. 
greenlet_lock.release() if blocking: # 500 us -> initial delay of 1 ms gevent.sleep(0.0005) else: return False finally: if remove_from_queue_on_return: self._wait_queue.remove(gid) def release(self): tid_gid = (thread_get_ident(), greenlet_get_ident()) if tid_gid != self._owner: raise RuntimeError("cannot release un-acquired lock") self._count -= 1 if not self._count: self._owner = None gid = self._wait_queue.pop(0) assert gid == tid_gid[1] self._thread_local.greenlet_lock.release() __enter__ = acquire def __exit__(self, t, v, tb): self.release() def _get_greenlet_lock(self): if not hasattr(self._thread_local, 'greenlet_lock'): greenlet_lock = self._thread_local.greenlet_lock = BoundedSemaphore(1) else: greenlet_lock = self._thread_local.greenlet_lock return greenlet_lock def _is_owned(self): return self._owner == (thread_get_ident(), greenlet_get_ident()) else: from threading import ( Lock as ThreadLock, RLock as ThreadRLock, currentThread) try: from thread import ( get_ident as thread_get_ident, _local as thread_local) except ImportError: from _thread import ( get_ident as thread_get_ident, _local as thread_local) def thread_get_name(): return currentThread().getName() greenlet_get_ident = thread_get_ident greenlet_local = thread_local class GreenletRLock(object): def acquire(self): pass def release(self): pass def __enter__(self): pass def __exit__(self, t, v, tb): pass def new_fine_grained_lock(): global use_gevent if use_gevent: return GreenletRLock() else: return ThreadRLock() has_contextvars = True try: import contextvars except ImportError: has_contextvars = False if has_contextvars: from contextvars import ContextVar from itertools import count context_ident_counter = count() context_ident = ContextVar('context_ident') def context_get_ident(): try: return context_ident.get() except LookupError: ident = 'context-%s' % next(context_ident_counter) context_ident.set(ident) return ident def is_context_enabled(): try: context_ident.get() return True except 
LookupError: return False else: class ContextVar(object): def __init__(self, name): self.name = name self.local = thread_local() def set(self, value): self.local = value def get(self, default=None): if self.local is None: return default return default def context_get_ident(): return 1 def is_context_enabled(): return False logbook-1.5.3/logbook/handlers.py000066400000000000000000002135561355165376200170130ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ logbook.handlers ~~~~~~~~~~~~~~~~ The handler interface and builtin handlers. :copyright: (c) 2010 by Armin Ronacher, Georg Brandl. :license: BSD, see LICENSE for more details. """ import io import os import re import sys import stat import errno import socket import gzip import math try: from hashlib import sha1 except ImportError: from sha import new as sha1 import traceback import collections from datetime import datetime, timedelta from collections import deque from textwrap import dedent from logbook.base import ( CRITICAL, ERROR, WARNING, NOTICE, INFO, DEBUG, TRACE, NOTSET, level_name_property, _missing, lookup_level, Flags, ContextObject, ContextStackManager, _datetime_factory) from logbook.helpers import ( rename, b, _is_text_stream, is_unicode, PY2, zip, xrange, string_types, collections_abc, integer_types, reraise, u, with_metaclass) from logbook.concurrency import new_fine_grained_lock DEFAULT_FORMAT_STRING = u( '[{record.time:%Y-%m-%d %H:%M:%S.%f%z}] ' '{record.level_name}: {record.channel}: {record.message}') SYSLOG_FORMAT_STRING = u('{record.channel}: {record.message}') NTLOG_FORMAT_STRING = dedent(u(''' Message Level: {record.level_name} Location: {record.filename}:{record.lineno} Module: {record.module} Function: {record.func_name} Exact Time: {record.time:%Y-%m-%d %H:%M:%S} Event provided Message: {record.message} ''')).lstrip() TEST_FORMAT_STRING = u('[{record.level_name}] {record.channel}: {record.message}') MAIL_FORMAT_STRING = dedent(u(''' Subject: {handler.subject} Message type: 
{record.level_name} Location: {record.filename}:{record.lineno} Module: {record.module} Function: {record.func_name} Time: {record.time:%Y-%m-%d %H:%M:%S} Message: {record.message} ''')).lstrip() MAIL_RELATED_FORMAT_STRING = dedent(u(''' Message type: {record.level_name} Location: {record.filename}:{record.lineno} Module: {record.module} Function: {record.func_name} {record.message} ''')).lstrip() SYSLOG_PORT = 514 REGTYPE = type(re.compile("I'm a regular expression!")) def create_syshandler(application_name, level=NOTSET): """Creates the handler the operating system provides. On Unix systems this creates a :class:`SyslogHandler`, on Windows sytems it will create a :class:`NTEventLogHandler`. """ if os.name == 'nt': return NTEventLogHandler(application_name, level=level) return SyslogHandler(application_name, level=level) class _HandlerType(type): """The metaclass of handlers injects a destructor if the class has an overridden close method. This makes it possible that the default handler class as well as all subclasses that don't need cleanup to be collected with less overhead. """ def __new__(cls, name, bases, d): # aha, that thing has a custom close method. We will need a magic # __del__ for it to be called on cleanup. if (bases != (ContextObject,) and 'close' in d and '__del__' not in d and not any(hasattr(x, '__del__') for x in bases)): def _magic_del(self): try: self.close() except Exception: # del is also invoked when init fails, so we better just # ignore any exception that might be raised here pass d['__del__'] = _magic_del return type.__new__(cls, name, bases, d) class Handler(with_metaclass(_HandlerType), ContextObject): """Handler instances dispatch logging events to specific destinations. The base handler class. Acts as a placeholder which defines the Handler interface. Handlers can optionally use Formatter instances to format records as desired. 
By default, no formatter is specified; in this case, the 'raw' message as determined by record.message is logged. To bind a handler you can use the :meth:`push_application`, :meth:`push_thread` or :meth:`push_greenlet` methods. This will push the handler on a stack of handlers. To undo this, use the :meth:`pop_application`, :meth:`pop_thread` methods and :meth:`pop_greenlet`:: handler = MyHandler() handler.push_application() # all here goes to that handler handler.pop_application() By default messages sent to that handler will not go to a handler on an outer level on the stack, if handled. This can be changed by setting bubbling to `True`. There are also context managers to setup the handler for the duration of a `with`-block:: with handler.applicationbound(): ... with handler.threadbound(): ... with handler.greenletbound(): ... Because `threadbound` is a common operation, it is aliased to a with on the handler itself if not using gevent:: with handler: ... If gevent is enabled, the handler is aliased to `greenletbound`. """ stack_manager = ContextStackManager() #: a flag for this handler that can be set to `True` for handlers that #: are consuming log records but are not actually displaying it. This #: flag is set for the :class:`NullHandler` for instance. blackhole = False def __init__(self, level=NOTSET, filter=None, bubble=False): #: the level for the handler. Defaults to `NOTSET` which #: consumes all entries. self.level = lookup_level(level) #: the formatter to be used on records. This is a function #: that is passed a log record as first argument and the #: handler as second and returns something formatted #: (usually a unicode string) self.formatter = None #: the filter to be used with this handler self.filter = filter #: the bubble flag of this handler self.bubble = bubble level_name = level_name_property() def format(self, record): """Formats a record with the given formatter. If no formatter is set, the record message is returned. 
Generally speaking the return value is most likely a unicode string, but nothing in the handler interface requires a formatter to return a unicode string. The combination of a handler and formatter might have the formatter return an XML element tree for example. """ if self.formatter is None: return record.message return self.formatter(record, self) def should_handle(self, record): """Returns `True` if this handler wants to handle the record. The default implementation checks the level. """ return record.level >= self.level def handle(self, record): """Emits the record and falls back. It tries to :meth:`emit` the record and if that fails, it will call into :meth:`handle_error` with the record and traceback. This function itself will always emit when called, even if the logger level is higher than the record's level. If this method returns `False` it signals to the calling function that no recording took place in which case it will automatically bubble. This should not be used to signal error situations. The default implementation always returns `True`. """ try: self.emit(record) except Exception: self.handle_error(record, sys.exc_info()) return True def emit(self, record): """Emit the specified logging record. This should take the record and deliver it to whereever the handler sends formatted log records. """ def emit_batch(self, records, reason): """Some handlers may internally queue up records and want to forward them at once to another handler. For example the :class:`~logbook.FingersCrossedHandler` internally buffers records until a level threshold is reached in which case the buffer is sent to this method and not :meth:`emit` for each record. The default behaviour is to call :meth:`emit` for each record in the buffer, but handlers can use this to optimize log handling. For instance the mail handler will try to batch up items into one mail and not to emit mails for each record in the buffer. 
Note that unlike :meth:`emit` there is no wrapper method like :meth:`handle` that does error handling. The reason is that this is intended to be used by other handlers which are already protected against internal breakage. `reason` is a string that specifies the rason why :meth:`emit_batch` was called, and not :meth:`emit`. The following are valid values: ``'buffer'`` Records were buffered for performance reasons or because the records were sent to another process and buffering was the only possible way. For most handlers this should be equivalent to calling :meth:`emit` for each record. ``'escalation'`` Escalation means that records were buffered in case the threshold was exceeded. In this case, the last record in the iterable is the record that triggered the call. ``'group'`` All the records in the iterable belong to the same logical component and happened in the same process. For example there was a long running computation and the handler is invoked with a bunch of records that happened there. This is similar to the escalation reason, just that the first one is the significant one, not the last. If a subclass overrides this and does not want to handle a specific reason it must call into the superclass because more reasons might appear in future releases. Example implementation:: def emit_batch(self, records, reason): if reason not in ('escalation', 'group'): Handler.emit_batch(self, records, reason) ... """ for record in records: self.emit(record) def close(self): """Tidy up any resources used by the handler. This is automatically called by the destructor of the class as well, but explicit calls are encouraged. Make sure that multiple calls to close are possible. """ def handle_error(self, record, exc_info): """Handle errors which occur during an emit() call. The behaviour of this function depends on the current `errors` setting. Check :class:`Flags` for more information. 
""" try: behaviour = Flags.get_flag('errors', 'print') if behaviour == 'raise': reraise(exc_info[0], exc_info[1], exc_info[2]) elif behaviour == 'print': traceback.print_exception(*(exc_info + (None, sys.stderr))) sys.stderr.write('Logged from file %s, line %s\n' % ( record.filename, record.lineno)) except IOError: pass class NullHandler(Handler): """A handler that does nothing. Useful to silence logs above a certain location in the handler stack:: handler = NullHandler() handler.push_application() NullHandlers swallow all logs sent to them, and do not bubble them onwards. """ blackhole = True def __init__(self, level=NOTSET, filter=None): super(NullHandler, self).__init__(level=level, filter=filter, bubble=False) class WrapperHandler(Handler): """A class that can wrap another handler and redirect all calls to the wrapped handler:: handler = WrapperHandler(other_handler) Subclasses should override the :attr:`_direct_attrs` attribute as necessary. """ #: a set of direct attributes that are not forwarded to the inner #: handler. This has to be extended as necessary. _direct_attrs = frozenset(['handler']) def __init__(self, handler): self.handler = handler def __getattr__(self, name): return getattr(self.handler, name) def __setattr__(self, name, value): if name in self._direct_attrs: return Handler.__setattr__(self, name, value) setattr(self.handler, name, value) class StringFormatter(object): """Many handlers format the log entries to text format. This is done by a callable that is passed a log record and returns an unicode string. The default formatter for this is implemented as a class so that it becomes possible to hook into every aspect of the formatting process. 
""" def __init__(self, format_string): self.format_string = format_string def _get_format_string(self): return self._format_string def _set_format_string(self, value): self._format_string = value self._formatter = value format_string = property(_get_format_string, _set_format_string) del _get_format_string, _set_format_string def format_record(self, record, handler): try: return self._formatter.format(record=record, handler=handler) except UnicodeEncodeError: # self._formatter is a str, but some of the record items # are unicode fmt = self._formatter.decode('ascii', 'replace') return fmt.format(record=record, handler=handler) except UnicodeDecodeError: # self._formatter is unicode, but some of the record items # are non-ascii str fmt = self._formatter.encode('ascii', 'replace') return fmt.format(record=record, handler=handler) def format_exception(self, record): return record.formatted_exception def __call__(self, record, handler): line = self.format_record(record, handler) exc = self.format_exception(record) if exc: line += u('\n') + exc return line class StringFormatterHandlerMixin(object): """A mixin for handlers that provides a default integration for the :class:`~logbook.StringFormatter` class. This is used for all handlers by default that log text to a destination. """ #: a class attribute for the default format string to use if the #: constructor was invoked with `None`. default_format_string = DEFAULT_FORMAT_STRING #: the class to be used for string formatting formatter_class = StringFormatter def __init__(self, format_string): if format_string is None: format_string = self.default_format_string #: the currently attached format string as new-style format #: string. 
self.format_string = format_string def _get_format_string(self): if isinstance(self.formatter, StringFormatter): return self.formatter.format_string def _set_format_string(self, value): if value is None: self.formatter = None else: self.formatter = self.formatter_class(value) format_string = property(_get_format_string, _set_format_string) del _get_format_string, _set_format_string class HashingHandlerMixin(object): """Mixin class for handlers that are hashing records.""" def hash_record_raw(self, record): """Returns a hashlib object with the hash of the record.""" hash = sha1() hash.update(('%d\x00' % record.level).encode('ascii')) hash.update((record.channel or u('')).encode('utf-8') + b('\x00')) hash.update(record.filename.encode('utf-8') + b('\x00')) hash.update(b(str(record.lineno))) return hash def hash_record(self, record): """Returns a hash for a record to keep it apart from other records. This is used for the `record_limit` feature. By default The level, channel, filename and location are hashed. Calls into :meth:`hash_record_raw`. """ return self.hash_record_raw(record).hexdigest() _NUMBER_TYPES = integer_types + (float,) class LimitingHandlerMixin(HashingHandlerMixin): """Mixin class for handlers that want to limit emitting records. In the default setting it delivers all log records but it can be set up to not send more than n mails for the same record each hour to not overload an inbox and the network in case a message is triggered multiple times a minute. 
The following example limits it to 60 mails an hour:: from datetime import timedelta handler = MailHandler(record_limit=1, record_delta=timedelta(minutes=1)) """ def __init__(self, record_limit, record_delta): self.record_limit = record_limit self._limit_lock = new_fine_grained_lock() self._record_limits = {} if record_delta is None: record_delta = timedelta(seconds=60) elif isinstance(record_delta, _NUMBER_TYPES): record_delta = timedelta(seconds=record_delta) self.record_delta = record_delta def check_delivery(self, record): """Helper function to check if data should be delivered by this handler. It returns a tuple in the form ``(suppression_count, allow)``. The first one is the number of items that were not delivered so far, the second is a boolean flag if a delivery should happen now. """ if self.record_limit is None: return 0, True hash = self.hash_record(record) self._limit_lock.acquire() try: allow_delivery = None suppression_count = old_count = 0 first_count = now = datetime.utcnow() if hash in self._record_limits: last_count, suppression_count = self._record_limits[hash] if last_count + self.record_delta < now: allow_delivery = True else: first_count = last_count old_count = suppression_count if (not suppression_count and len(self._record_limits) >= self.max_record_cache): cache_items = sorted(self._record_limits.items()) del cache_items[:int(self._record_limits) * self.record_cache_prune] self._record_limits = dict(cache_items) self._record_limits[hash] = (first_count, old_count + 1) if allow_delivery is None: allow_delivery = old_count < self.record_limit return suppression_count, allow_delivery finally: self._limit_lock.release() class StreamHandler(Handler, StringFormatterHandlerMixin): """a handler class which writes logging records, appropriately formatted, to a stream. note that this class does not close the stream, as sys.stdout or sys.stderr may be used. 
    If a stream handler is used in a `with` statement directly it will
    :meth:`close` on exit to support this pattern::

        with StreamHandler(my_stream):
            pass

    .. admonition:: Notes on the encoding

       On Python 3, the encoding parameter is only used if a stream was
       passed that was opened in binary mode.
    """

    def __init__(self, stream, level=NOTSET, format_string=None,
                 encoding=None, filter=None, bubble=False):
        Handler.__init__(self, level, filter, bubble)
        StringFormatterHandlerMixin.__init__(self, format_string)
        self.encoding = encoding
        # serializes emit() so interleaved records cannot corrupt the stream
        self.lock = new_fine_grained_lock()
        # _missing is the sentinel used by StderrHandler, which overrides
        # `stream` as a property and therefore must not have it assigned here
        if stream is not _missing:
            self.stream = stream

    def __enter__(self):
        return Handler.__enter__(self)

    def __exit__(self, exc_type, exc_value, tb):
        # close on exit so `with StreamHandler(...)` flushes/releases the
        # stream in addition to the usual handler pop
        self.close()
        return Handler.__exit__(self, exc_type, exc_value, tb)

    def ensure_stream_is_open(self):
        """this method should be overriden in sub-classes to ensure that the
        inner stream is open
        """
        pass

    def close(self):
        """The default stream handler implementation is not to close
        the wrapped stream but to flush it.
        """
        self.flush()

    def flush(self):
        """Flushes the inner stream."""
        if self.stream is not None and hasattr(self.stream, 'flush'):
            self.stream.flush()

    def encode(self, msg):
        """Encodes the message to the stream encoding."""
        stream = self.stream
        rv = msg + '\n'
        # Encode only when the target needs bytes: on Python 2 any unicode
        # message, on Python 3 only when writing to a binary-mode stream.
        if ((PY2 and is_unicode(rv)) or
                not (PY2 or is_unicode(rv) or _is_text_stream(stream))):
            enc = self.encoding
            if enc is None:
                # fall back to the stream's own encoding, then UTF-8
                enc = getattr(stream, 'encoding', None) or 'utf-8'
            rv = rv.encode(enc, 'replace')
        return rv

    def write(self, item):
        """Writes a bytestring to the stream."""
        self.stream.write(item)

    def emit(self, record):
        # format outside the lock; only the actual write is serialized
        msg = self.format(record)
        self.lock.acquire()
        try:
            self.ensure_stream_is_open()
            self.write(self.encode(msg))
            if self.should_flush():
                self.flush()
        finally:
            self.lock.release()

    def should_flush(self):
        # subclasses (e.g. compression handlers) override this to suppress
        # per-record flushing
        return True


class FileHandler(StreamHandler):
    """A handler that does the task of opening and closing files for you.
By default the file is opened right away, but you can also `delay` the open to the point where the first message is written. This is useful when the handler is used with a :class:`~logbook.FingersCrossedHandler` or something similar. """ def __init__(self, filename, mode='a', encoding=None, level=NOTSET, format_string=None, delay=False, filter=None, bubble=False): if encoding is None: encoding = 'utf-8' StreamHandler.__init__(self, None, level, format_string, encoding, filter, bubble) self._filename = filename self._mode = mode if delay: self.stream = None else: self._open() def _open(self, mode=None): if mode is None: mode = self._mode self.stream = io.open(self._filename, mode, encoding=self.encoding) def write(self, item): self.ensure_stream_is_open() if isinstance(item, bytes): self.stream.buffer.write(item) else: self.stream.write(item) def close(self): self.lock.acquire() try: if self.stream is not None: self.flush() self.stream.close() self.stream = None finally: self.lock.release() def encode(self, record): # encodes based on the stream settings, so the stream has to be # open at the time this function is called. 
self.ensure_stream_is_open() return StreamHandler.encode(self, record) def ensure_stream_is_open(self): if self.stream is None: self._open() class GZIPCompressionHandler(FileHandler): def __init__(self, filename, encoding=None, level=NOTSET, format_string=None, delay=False, filter=None, bubble=False, compression_quality=9): self._compression_quality = compression_quality super(GZIPCompressionHandler, self).__init__(filename, mode='wb', encoding=encoding, level=level, format_string=format_string, delay=delay, filter=filter, bubble=bubble) def _open(self, mode=None): if mode is None: mode = self._mode self.stream = gzip.open(self._filename, mode, compresslevel=self._compression_quality) def write(self, item): if isinstance(item, str): item = item.encode(encoding=self.encoding) self.ensure_stream_is_open() self.stream.write(item) def should_flush(self): # gzip manages writes independently. Flushing prematurely could mean # duplicate flushes and thus bloated files return False class BrotliCompressionHandler(FileHandler): def __init__(self, filename, encoding=None, level=NOTSET, format_string=None, delay=False, filter=None, bubble=False, compression_window_size=4*1024**2, compression_quality=11): super(BrotliCompressionHandler, self).__init__(filename, mode='wb', encoding=encoding, level=level, format_string=format_string, delay=delay, filter=filter, bubble=bubble) try: from brotli import Compressor except ImportError: raise RuntimeError('The brotli library is required for ' 'the BrotliCompressionHandler.') max_window_size = int(math.log(compression_window_size, 2)) self._compressor = Compressor(quality=compression_quality, lgwin=max_window_size) def _open(self, mode=None): if mode is None: mode = self._mode self.stream = io.open(self._filename, mode) def write(self, item): if isinstance(item, str): item = item.encode(encoding=self.encoding) ret = self._compressor.process(item) if ret: self.ensure_stream_is_open() self.stream.write(ret) super(BrotliCompressionHandler, 
self).flush() def should_flush(self): return False def flush(self): if self._compressor is not None: ret = self._compressor.flush() if ret: self.ensure_stream_is_open() self.stream.write(ret) super(BrotliCompressionHandler, self).flush() def close(self): if self._compressor is not None: self.ensure_stream_is_open() self.stream.write(self._compressor.finish()) self._compressor = None super(BrotliCompressionHandler, self).close() class MonitoringFileHandler(FileHandler): """A file handler that will check if the file was moved while it was open. This might happen on POSIX systems if an application like logrotate moves the logfile over. Because of different IO concepts on Windows, this handler will not work on a windows system. """ def __init__(self, filename, mode='a', encoding='utf-8', level=NOTSET, format_string=None, delay=False, filter=None, bubble=False): FileHandler.__init__(self, filename, mode, encoding, level, format_string, delay, filter, bubble) if os.name == 'nt': raise RuntimeError('MonitoringFileHandler ' 'does not support Windows') self._query_fd() def _query_fd(self): if self.stream is None: self._last_stat = None, None else: try: st = os.stat(self._filename) except OSError: e = sys.exc_info()[1] if e.errno != errno.ENOENT: raise self._last_stat = None, None else: self._last_stat = st[stat.ST_DEV], st[stat.ST_INO] def emit(self, record): msg = self.format(record) self.lock.acquire() try: last_stat = self._last_stat self._query_fd() if last_stat != self._last_stat and self.stream is not None: self.flush() self.stream.close() self.stream = None self.ensure_stream_is_open() self.write(self.encode(msg)) self.flush() self._query_fd() finally: self.lock.release() class StderrHandler(StreamHandler): """A handler that writes to what is currently at stderr. 
At the first glace this appears to just be a :class:`StreamHandler` with the stream set to :data:`sys.stderr` but there is a difference: if the handler is created globally and :data:`sys.stderr` changes later, this handler will point to the current `stderr`, whereas a stream handler would still point to the old one. """ def __init__(self, level=NOTSET, format_string=None, filter=None, bubble=False): StreamHandler.__init__(self, _missing, level, format_string, None, filter, bubble) @property def stream(self): return sys.stderr class RotatingFileHandler(FileHandler): """This handler rotates based on file size. Once the maximum size is reached it will reopen the file and start with an empty file again. The old file is moved into a backup copy (named like the file, but with a ``.backupnumber`` appended to the file. So if you are logging to ``mail`` the first backup copy is called ``mail.1``.) The default number of backups is 5. Unlike a similar logger from the logging package, the backup count is mandatory because just reopening the file is dangerous as it deletes the log without asking on rollover. 
""" def __init__(self, filename, mode='a', encoding='utf-8', level=NOTSET, format_string=None, delay=False, max_size=1024 * 1024, backup_count=5, filter=None, bubble=False): FileHandler.__init__(self, filename, mode, encoding, level, format_string, delay, filter, bubble) self.max_size = max_size self.backup_count = backup_count assert backup_count > 0, ('at least one backup file has to be ' 'specified') def should_rollover(self, record, bytes): self.stream.seek(0, 2) return self.stream.tell() + bytes >= self.max_size def perform_rollover(self): self.stream.close() for x in xrange(self.backup_count - 1, 0, -1): src = '%s.%d' % (self._filename, x) dst = '%s.%d' % (self._filename, x + 1) try: rename(src, dst) except OSError: e = sys.exc_info()[1] if e.errno != errno.ENOENT: raise rename(self._filename, self._filename + '.1') self._open('w') def emit(self, record): msg = self.format(record) self.lock.acquire() try: msg = self.encode(msg) if self.should_rollover(record, len(msg)): self.perform_rollover() self.write(msg) self.flush() finally: self.lock.release() class TimedRotatingFileHandler(FileHandler): """This handler rotates based on dates. It will name the file after the filename you specify and the `date_format` pattern. So for example if you configure your handler like this:: handler = TimedRotatingFileHandler('/var/log/foo.log', date_format='%Y-%m-%d') The filenames for the logfiles will look like this:: /var/log/foo-2010-01-10.log /var/log/foo-2010-01-11.log ... By default it will keep all these files around, if you want to limit them, you can specify a `backup_count`. You may supply an optional `rollover_format`. This allows you to specify the format for the filenames of rolled-over files. 
the format as So for example if you configure your handler like this:: handler = TimedRotatingFileHandler( '/var/log/foo.log', date_format='%Y-%m-%d', rollover_format='{basename}{ext}.{timestamp}') The filenames for the logfiles will look like this:: /var/log/foo.log.2010-01-10 /var/log/foo.log.2010-01-11 ... Finally, an optional argument `timed_filename_for_current` may be set to false if you wish to have the current log file match the supplied filename until it is rolled over """ def __init__(self, filename, mode='a', encoding='utf-8', level=NOTSET, format_string=None, date_format='%Y-%m-%d', backup_count=0, filter=None, bubble=False, timed_filename_for_current=True, rollover_format='{basename}-{timestamp}{ext}'): self.date_format = date_format self.backup_count = backup_count self.rollover_format = rollover_format self.original_filename = filename self.basename, self.ext = os.path.splitext(os.path.abspath(filename)) self.timed_filename_for_current = timed_filename_for_current self._timestamp = self._get_timestamp(_datetime_factory()) if self.timed_filename_for_current: filename = self.generate_timed_filename(self._timestamp) elif os.path.exists(filename): self._timestamp = self._get_timestamp( datetime.fromtimestamp( os.stat(filename).st_mtime ) ) FileHandler.__init__(self, filename, mode, encoding, level, format_string, True, filter, bubble) def _get_timestamp(self, datetime): """ Fetches a formatted string witha timestamp of the given datetime """ return datetime.strftime(self.date_format) def generate_timed_filename(self, timestamp): """ Produces a filename that includes a timestamp in the format supplied to the handler at init time. """ timed_filename = self.rollover_format.format( basename=self.basename, timestamp=timestamp, ext=self.ext) return timed_filename def files_to_delete(self): """Returns a list with the files that have to be deleted when a rollover occours. 
""" directory = os.path.dirname(self._filename) files = [] rollover_regex = re.compile(self.rollover_format.format( basename=re.escape(self.basename), timestamp='.+', ext=re.escape(self.ext), )) for filename in os.listdir(directory): filename = os.path.join(directory, filename) if rollover_regex.match(filename): files.append((os.path.getmtime(filename), filename)) files.sort() if self.backup_count > 1: return files[:-self.backup_count + 1] else: return files[:] def perform_rollover(self, new_timestamp): if self.stream is not None: self.stream.close() if ( not self.timed_filename_for_current and os.path.exists(self._filename) ): filename = self.generate_timed_filename(self._timestamp) os.rename(self._filename, filename) if self.backup_count > 0: for time, filename in self.files_to_delete(): os.remove(filename) if self.timed_filename_for_current: self._filename = self.generate_timed_filename(new_timestamp) self._timestamp = new_timestamp self._open('w') def emit(self, record): msg = self.format(record) self.lock.acquire() try: new_timestamp = self._get_timestamp(record.time) if new_timestamp != self._timestamp: self.perform_rollover(new_timestamp) self.write(self.encode(msg)) self.flush() finally: self.lock.release() class TestHandler(Handler, StringFormatterHandlerMixin): """Like a stream handler but keeps the values in memory. This logger provides some ways to test for the records in memory. Example usage:: def my_test(): with logbook.TestHandler() as handler: logger.warn('A warning') assert logger.has_warning('A warning') ... 
""" default_format_string = TEST_FORMAT_STRING def __init__(self, level=NOTSET, format_string=None, filter=None, bubble=False, force_heavy_init=False): Handler.__init__(self, level, filter, bubble) StringFormatterHandlerMixin.__init__(self, format_string) #: captures the :class:`LogRecord`\s as instances self.records = [] self._formatted_records = [] self._formatted_record_cache = [] self._force_heavy_init = force_heavy_init def close(self): """Close all records down when the handler is closed.""" for record in self.records: record.close() def emit(self, record): # keep records open because we will want to examine them after the # call to the emit function. If we don't do that, the traceback # attribute and other things will already be removed. record.keep_open = True if self._force_heavy_init: record.heavy_init() self.records.append(record) @property def formatted_records(self): """Captures the formatted log records as unicode strings.""" if (len(self._formatted_record_cache) != len(self.records) or any(r1 != r2 for r1, r2 in zip(self.records, self._formatted_record_cache))): self._formatted_records = [self.format(r) for r in self.records] self._formatted_record_cache = list(self.records) return self._formatted_records @property def has_criticals(self): """`True` if any :data:`CRITICAL` records were found.""" return any(r.level == CRITICAL for r in self.records) @property def has_errors(self): """`True` if any :data:`ERROR` records were found.""" return any(r.level == ERROR for r in self.records) @property def has_warnings(self): """`True` if any :data:`WARNING` records were found.""" return any(r.level == WARNING for r in self.records) @property def has_notices(self): """`True` if any :data:`NOTICE` records were found.""" return any(r.level == NOTICE for r in self.records) @property def has_infos(self): """`True` if any :data:`INFO` records were found.""" return any(r.level == INFO for r in self.records) @property def has_debugs(self): """`True` if any 
:data:`DEBUG` records were found.""" return any(r.level == DEBUG for r in self.records) @property def has_traces(self): """`True` if any :data:`TRACE` records were found.""" return any(r.level == TRACE for r in self.records) def has_critical(self, *args, **kwargs): """`True` if a specific :data:`CRITICAL` log record exists. See :ref:`probe-log-records` for more information. """ kwargs['level'] = CRITICAL return self._test_for(*args, **kwargs) def has_error(self, *args, **kwargs): """`True` if a specific :data:`ERROR` log record exists. See :ref:`probe-log-records` for more information. """ kwargs['level'] = ERROR return self._test_for(*args, **kwargs) def has_warning(self, *args, **kwargs): """`True` if a specific :data:`WARNING` log record exists. See :ref:`probe-log-records` for more information. """ kwargs['level'] = WARNING return self._test_for(*args, **kwargs) def has_notice(self, *args, **kwargs): """`True` if a specific :data:`NOTICE` log record exists. See :ref:`probe-log-records` for more information. """ kwargs['level'] = NOTICE return self._test_for(*args, **kwargs) def has_info(self, *args, **kwargs): """`True` if a specific :data:`INFO` log record exists. See :ref:`probe-log-records` for more information. """ kwargs['level'] = INFO return self._test_for(*args, **kwargs) def has_debug(self, *args, **kwargs): """`True` if a specific :data:`DEBUG` log record exists. See :ref:`probe-log-records` for more information. """ kwargs['level'] = DEBUG return self._test_for(*args, **kwargs) def has_trace(self, *args, **kwargs): """`True` if a specific :data:`TRACE` log record exists. See :ref:`probe-log-records` for more information. 
""" kwargs['level'] = TRACE return self._test_for(*args, **kwargs) def _test_for(self, message=None, channel=None, level=None): def _match(needle, haystack): """Matches both compiled regular expressions and strings""" if isinstance(needle, REGTYPE) and needle.search(haystack): return True if needle == haystack: return True return False for record in self.records: if level is not None and record.level != level: continue if channel is not None and record.channel != channel: continue if message is not None and not _match(message, record.message): continue return True return False class MailHandler(Handler, StringFormatterHandlerMixin, LimitingHandlerMixin): """A handler that sends error mails. The format string used by this handler are the contents of the mail plus the headers. This is handy if you want to use a custom subject or ``X-`` header:: handler = MailHandler(format_string='''\ Subject: {record.level_name} on My Application {record.message} {record.extra[a_custom_injected_record]} ''') This handler will always emit text-only mails for maximum portability and best performance. In the default setting it delivers all log records but it can be set up to not send more than n mails for the same record each hour to not overload an inbox and the network in case a message is triggered multiple times a minute. The following example limits it to 60 mails an hour:: from datetime import timedelta handler = MailHandler(record_limit=1, record_delta=timedelta(minutes=1)) The default timedelta is 60 seconds (one minute). The mail handler sends mails in a blocking manner. If you are not using some centralized system for logging these messages (with the help of ZeroMQ or others) and the logging system slows you down you can wrap the handler in a :class:`logbook.queues.ThreadedWrapperHandler` that will then send the mails in a background thread. 
`server_addr` can be a tuple of host and port, or just a string containing the host to use the default port (25, or 465 if connecting securely.) `credentials` can be a tuple or dictionary of arguments that will be passed to :py:meth:`smtplib.SMTP.login`. `secure` can be a tuple, dictionary, or boolean. As a boolean, this will simply enable or disable a secure connection. The tuple is unpacked as parameters `keyfile`, `certfile`. As a dictionary, `secure` should contain those keys. For backwards compatibility, ``secure=()`` will enable a secure connection. If `starttls` is enabled (default), these parameters will be passed to :py:meth:`smtplib.SMTP.starttls`, otherwise :py:class:`smtplib.SMTP_SSL`. .. versionchanged:: 0.3 The handler supports the batching system now. .. versionadded:: 1.0 `starttls` parameter added to allow disabling STARTTLS for SSL connections. .. versionchanged:: 1.0 If `server_addr` is a string, the default port will be used. .. versionchanged:: 1.0 `credentials` parameter can now be a dictionary of keyword arguments. .. versionchanged:: 1.0 `secure` can now be a dictionary or boolean in addition to to a tuple. """ default_format_string = MAIL_FORMAT_STRING default_related_format_string = MAIL_RELATED_FORMAT_STRING default_subject = u('Server Error in Application') #: the maximum number of record hashes in the cache for the limiting #: feature. Afterwards, record_cache_prune percent of the oldest #: entries are removed max_record_cache = 512 #: the number of items to prune on a cache overflow in percent. 
record_cache_prune = 0.333 def __init__(self, from_addr, recipients, subject=None, server_addr=None, credentials=None, secure=None, record_limit=None, record_delta=None, level=NOTSET, format_string=None, related_format_string=None, filter=None, bubble=False, starttls=True): Handler.__init__(self, level, filter, bubble) StringFormatterHandlerMixin.__init__(self, format_string) LimitingHandlerMixin.__init__(self, record_limit, record_delta) self.from_addr = from_addr self.recipients = recipients if subject is None: subject = self.default_subject self.subject = subject self.server_addr = server_addr self.credentials = credentials self.secure = secure if related_format_string is None: related_format_string = self.default_related_format_string self.related_format_string = related_format_string self.starttls = starttls def _get_related_format_string(self): if isinstance(self.related_formatter, StringFormatter): return self.related_formatter.format_string def _set_related_format_string(self, value): if value is None: self.related_formatter = None else: self.related_formatter = self.formatter_class(value) related_format_string = property(_get_related_format_string, _set_related_format_string) del _get_related_format_string, _set_related_format_string def get_recipients(self, record): """Returns the recipients for a record. By default the :attr:`recipients` attribute is returned for all records. """ return self.recipients def message_from_record(self, record, suppressed): """Creates a new message for a record as email message object (:class:`email.message.Message`). `suppressed` is the number of mails not sent if the `record_limit` feature is active. """ from email.message import Message from email.header import Header msg = Message() msg.set_charset('utf-8') lineiter = iter(self.format(record).splitlines()) for line in lineiter: if not line: break h, v = line.split(':', 1) # We could probably just encode everything. 
For the moment encode # only what really needed to avoid breaking a couple of tests. try: v.encode('ascii') except UnicodeEncodeError: msg[h.strip()] = Header(v.strip(), 'utf-8') else: msg[h.strip()] = v.strip() msg.replace_header('Content-Transfer-Encoding', '8bit') body = '\r\n'.join(lineiter) if suppressed: body += ('\r\n\r\nThis message occurred additional %d ' 'time(s) and was suppressed' % suppressed) # inconsistency in Python 2.5 # other versions correctly return msg.get_payload() as str if sys.version_info < (2, 6) and isinstance(body, unicode): body = body.encode('utf-8') msg.set_payload(body, 'UTF-8') return msg def format_related_record(self, record): """Used for format the records that led up to another record or records that are related into strings. Used by the batch formatter. """ return self.related_formatter(record, self) def generate_mail(self, record, suppressed=0): """Generates the final email (:class:`email.message.Message`) with headers and date. `suppressed` is the number of mails that were not send if the `record_limit` feature is active. """ from email.utils import formatdate msg = self.message_from_record(record, suppressed) msg['From'] = self.from_addr msg['Date'] = formatdate() return msg def collapse_mails(self, mail, related, reason): """When escaling or grouped mails are """ if not related: return mail if reason == 'group': title = 'Other log records in the same group' else: title = 'Log records that led up to this one' mail.set_payload('%s\r\n\r\n\r\n%s:\r\n\r\n%s' % ( mail.get_payload(), title, '\r\n\r\n'.join(body.rstrip() for body in related) ), 'UTF-8') return mail def get_connection(self): """Returns an SMTP connection. By default it reconnects for each sent mail. 
""" from smtplib import SMTP, SMTP_SSL, SMTP_PORT, SMTP_SSL_PORT if self.server_addr is None: host = '127.0.0.1' port = self.secure and SMTP_SSL_PORT or SMTP_PORT else: try: host, port = self.server_addr except ValueError: # If server_addr is a string, the tuple unpacking will raise # ValueError, and we can use the default port. host = self.server_addr port = self.secure and SMTP_SSL_PORT or SMTP_PORT # Previously, self.secure was passed as con.starttls(*self.secure). This # meant that starttls couldn't be used without a keyfile and certfile # unless an empty tuple was passed. See issue #94. # # The changes below allow passing: # - secure=True for secure connection without checking identity. # - dictionary with keys 'keyfile' and 'certfile'. # - tuple to be unpacked to variables keyfile and certfile. # - secure=() equivalent to secure=True for backwards compatibility. # - secure=False equivalent to secure=None to disable. if isinstance(self.secure, collections_abc.Mapping): keyfile = self.secure.get('keyfile', None) certfile = self.secure.get('certfile', None) elif isinstance(self.secure, collections_abc.Iterable): # Allow empty tuple for backwards compatibility if len(self.secure) == 0: keyfile = certfile = None else: keyfile, certfile = self.secure else: keyfile = certfile = None # Allow starttls to be disabled by passing starttls=False. if not self.starttls and self.secure: con = SMTP_SSL(host, port, keyfile=keyfile, certfile=certfile) else: con = SMTP(host, port) if self.credentials is not None: secure = self.secure if self.starttls and secure is not None and secure is not False: con.ehlo() con.starttls(keyfile=keyfile, certfile=certfile) con.ehlo() # Allow credentials to be a tuple or dict. 
if isinstance(self.credentials, collections_abc.Mapping): credentials_args = () credentials_kwargs = self.credentials else: credentials_args = self.credentials credentials_kwargs = dict() con.login(*credentials_args, **credentials_kwargs) return con def close_connection(self, con): """Closes the connection that was returned by :meth:`get_connection`. """ try: if con is not None: con.quit() except Exception: pass def deliver(self, msg, recipients): """Delivers the given message to a list of recipients.""" con = self.get_connection() try: con.sendmail(self.from_addr, recipients, msg.as_string()) finally: self.close_connection(con) def emit(self, record): suppressed = 0 if self.record_limit is not None: suppressed, allow_delivery = self.check_delivery(record) if not allow_delivery: return self.deliver(self.generate_mail(record, suppressed), self.get_recipients(record)) def emit_batch(self, records, reason): if reason not in ('escalation', 'group'): raise RuntimeError("reason must be either 'escalation' or 'group'") records = list(records) if not records: return trigger = records.pop(reason == 'escalation' and -1 or 0) suppressed = 0 if self.record_limit is not None: suppressed, allow_delivery = self.check_delivery(trigger) if not allow_delivery: return trigger_mail = self.generate_mail(trigger, suppressed) related = [self.format_related_record(record) for record in records] self.deliver(self.collapse_mails(trigger_mail, related, reason), self.get_recipients(trigger)) class GMailHandler(MailHandler): """ A customized mail handler class for sending emails via GMail (or Google Apps mail):: handler = GMailHandler( "my_user@gmail.com", "mypassword", ["to_user@some_mail.com"], ...) # other arguments same as MailHandler .. 
versionadded:: 0.6.0 """ def __init__(self, account_id, password, recipients, **kw): super(GMailHandler, self).__init__( account_id, recipients, secure=True, server_addr=("smtp.gmail.com", 587), credentials=(account_id, password), **kw) class SyslogHandler(Handler, StringFormatterHandlerMixin): """A handler class which sends formatted logging records to a syslog server. By default it will send to it via unix socket. """ default_format_string = SYSLOG_FORMAT_STRING # priorities LOG_EMERG = 0 # system is unusable LOG_ALERT = 1 # action must be taken immediately LOG_CRIT = 2 # critical conditions LOG_ERR = 3 # error conditions LOG_WARNING = 4 # warning conditions LOG_NOTICE = 5 # normal but significant condition LOG_INFO = 6 # informational LOG_DEBUG = 7 # debug-level messages # facility codes LOG_KERN = 0 # kernel messages LOG_USER = 1 # random user-level messages LOG_MAIL = 2 # mail system LOG_DAEMON = 3 # system daemons LOG_AUTH = 4 # security/authorization messages LOG_SYSLOG = 5 # messages generated internally by syslogd LOG_LPR = 6 # line printer subsystem LOG_NEWS = 7 # network news subsystem LOG_UUCP = 8 # UUCP subsystem LOG_CRON = 9 # clock daemon LOG_AUTHPRIV = 10 # security/authorization messages (private) LOG_FTP = 11 # FTP daemon # other codes through 15 reserved for system use LOG_LOCAL0 = 16 # reserved for local use LOG_LOCAL1 = 17 # reserved for local use LOG_LOCAL2 = 18 # reserved for local use LOG_LOCAL3 = 19 # reserved for local use LOG_LOCAL4 = 20 # reserved for local use LOG_LOCAL5 = 21 # reserved for local use LOG_LOCAL6 = 22 # reserved for local use LOG_LOCAL7 = 23 # reserved for local use facility_names = { 'auth': LOG_AUTH, 'authpriv': LOG_AUTHPRIV, 'cron': LOG_CRON, 'daemon': LOG_DAEMON, 'ftp': LOG_FTP, 'kern': LOG_KERN, 'lpr': LOG_LPR, 'mail': LOG_MAIL, 'news': LOG_NEWS, 'syslog': LOG_SYSLOG, 'user': LOG_USER, 'uucp': LOG_UUCP, 'local0': LOG_LOCAL0, 'local1': LOG_LOCAL1, 'local2': LOG_LOCAL2, 'local3': LOG_LOCAL3, 'local4': LOG_LOCAL4, 
'local5': LOG_LOCAL5, 'local6': LOG_LOCAL6, 'local7': LOG_LOCAL7, } level_priority_map = { DEBUG: LOG_DEBUG, INFO: LOG_INFO, NOTICE: LOG_NOTICE, WARNING: LOG_WARNING, ERROR: LOG_ERR, CRITICAL: LOG_CRIT } def __init__(self, application_name=None, address=None, facility='user', socktype=socket.SOCK_DGRAM, level=NOTSET, format_string=None, filter=None, bubble=False, record_delimiter=None): Handler.__init__(self, level, filter, bubble) StringFormatterHandlerMixin.__init__(self, format_string) self.application_name = application_name if address is None: if sys.platform == 'darwin': address = '/var/run/syslog' else: address = '/dev/log' self.remote_address = self.address = address self.facility = facility self.socktype = socktype if isinstance(address, string_types): self._connect_unixsocket() self.enveloper = self.unix_envelope default_delimiter = u'\x00' else: self._connect_netsocket() self.enveloper = self.net_envelope default_delimiter = u'\n' self.record_delimiter = default_delimiter \ if record_delimiter is None else record_delimiter self.connection_exception = getattr( __builtins__, 'BrokenPipeError', socket.error) def _connect_unixsocket(self): self.unixsocket = True self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) try: self.socket.connect(self.address) except socket.error: self.socket.close() self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) self.socket.connect(self.address) def _connect_netsocket(self): self.unixsocket = False self.socket = socket.socket(socket.AF_INET, self.socktype) if self.socktype == socket.SOCK_STREAM: self.socket.connect(self.remote_address) self.address = self.socket.getsockname() def encode_priority(self, record): facility = self.facility_names[self.facility] priority = self.level_priority_map.get(record.level, self.LOG_WARNING) return (facility << 3) | priority def wrap_segments(self, record, before): msg = self.format(record) segments = [segment for segment in msg.split(self.record_delimiter)] return 
(before + segment + self.record_delimiter for segment in segments) def unix_envelope(self, record): before = u'<{}>{}'.format( self.encode_priority(record), self.application_name + ':' if self.application_name else '') return self.wrap_segments(record, before) def net_envelope(self, record): # Gross but effective try: format_string = self.format_string application_name = self.application_name if not application_name and record.channel and \ '{record.channel}: ' in format_string: self.format_string = format_string.replace( '{record.channel}: ', '') self.application_name = record.channel # RFC 5424: version timestamp hostname app-name procid # msgid structured-data message before = u'<{}>1 {}Z {} {} {} - - '.format( self.encode_priority(record), record.time.isoformat(), socket.gethostname(), self.application_name if self.application_name else '-', record.process) return self.wrap_segments(record, before) finally: self.format_string = format_string self.application_name = application_name def emit(self, record): for segment in self.enveloper(record): self.send_to_socket(segment.encode('utf-8')) def send_to_socket(self, data): if self.unixsocket: try: self.socket.send(data) except socket.error: self._connect_unixsocket() self.socket.send(data) elif self.socktype == socket.SOCK_DGRAM: # the flags are no longer optional on Python 3 self.socket.sendto(data, 0, self.address) else: try: self.socket.sendall(data) except self.connection_exception: self._connect_netsocket() self.socket.send(data) def close(self): self.socket.close() class NTEventLogHandler(Handler, StringFormatterHandlerMixin): """A handler that sends to the NT event log system.""" dllname = None default_format_string = NTLOG_FORMAT_STRING def __init__(self, application_name, log_type='Application', level=NOTSET, format_string=None, filter=None, bubble=False): Handler.__init__(self, level, filter, bubble) StringFormatterHandlerMixin.__init__(self, format_string) if os.name != 'nt': raise 
RuntimeError('NTLogEventLogHandler requires a Windows ' 'operating system.') try: import win32evtlogutil import win32evtlog except ImportError: raise RuntimeError('The pywin32 library is required ' 'for the NTEventLogHandler.') self.application_name = application_name self._welu = win32evtlogutil dllname = self.dllname if not dllname: dllname = os.path.join(os.path.dirname(self._welu.__file__), '../win32service.pyd') self.log_type = log_type self._welu.AddSourceToRegistry(self.application_name, dllname, log_type) self._default_type = win32evtlog.EVENTLOG_INFORMATION_TYPE self._type_map = { DEBUG: win32evtlog.EVENTLOG_INFORMATION_TYPE, INFO: win32evtlog.EVENTLOG_INFORMATION_TYPE, NOTICE: win32evtlog.EVENTLOG_INFORMATION_TYPE, WARNING: win32evtlog.EVENTLOG_WARNING_TYPE, ERROR: win32evtlog.EVENTLOG_ERROR_TYPE, CRITICAL: win32evtlog.EVENTLOG_ERROR_TYPE } def unregister_logger(self): """Removes the application binding from the registry. If you call this, the log viewer will no longer be able to provide any information about the message. """ self._welu.RemoveSourceFromRegistry(self.application_name, self.log_type) def get_event_type(self, record): return self._type_map.get(record.level, self._default_type) def get_event_category(self, record): """Returns the event category for the record. Override this if you want to specify your own categories. This version returns 0. """ return 0 def get_message_id(self, record): """Returns the message ID (EventID) for the record. Override this if you want to specify your own ID. This version returns 1. """ return 1 def emit(self, record): id = self.get_message_id(record) cat = self.get_event_category(record) type = self.get_event_type(record) self._welu.ReportEvent(self.application_name, id, cat, type, [self.format(record)]) class FingersCrossedHandler(Handler): """This handler wraps another handler and will log everything in memory until a certain level (`action_level`, defaults to `ERROR`) is exceeded. 
When that happens the fingers crossed handler will activate forever and log all buffered records as well as records yet to come into another handled which was passed to the constructor. Alternatively it's also possible to pass a factory function to the constructor instead of a handler. That factory is then called with the triggering log entry and the finger crossed handler to create a handler which is then cached. The idea of this handler is to enable debugging of live systems. For example it might happen that code works perfectly fine 99% of the time, but then some exception happens. But the error that caused the exception alone might not be the interesting bit, the interesting information were the warnings that lead to the error. Here a setup that enables this for a web application:: from logbook import FileHandler from logbook import FingersCrossedHandler def issue_logging(): def factory(record, handler): return FileHandler('/var/log/app/issue-%s.log' % record.time) return FingersCrossedHandler(factory) def application(environ, start_response): with issue_logging(): return the_actual_wsgi_application(environ, start_response) Whenever an error occours, a new file in ``/var/log/app`` is created with all the logging calls that lead up to the error up to the point where the `with` block is exited. Please keep in mind that the :class:`~logbook.FingersCrossedHandler` handler is a one-time handler. Once triggered, it will not reset. Because of that you will have to re-create it whenever you bind it. In this case the handler is created when it's bound to the thread. Due to how the handler is implemented, the filter, bubble and level flags of the wrapped handler are ignored. .. versionchanged:: 0.3 The default behaviour is to buffer up records and then invoke another handler when a severity theshold was reached with the buffer emitting. This now enables this logger to be properly used with the :class:`~logbook.MailHandler`. 
You will now only get one mail for each buffered record. However once the threshold was reached you would still get a mail for each record which is why the `reset` flag was added. When set to `True`, the handler will instantly reset to the untriggered state and start buffering again:: handler = FingersCrossedHandler(MailHandler(...), buffer_size=10, reset=True) .. versionadded:: 0.3 The `reset` flag was added. """ #: the reason to be used for the batch emit. The default is #: ``'escalation'``. #: #: .. versionadded:: 0.3 batch_emit_reason = 'escalation' def __init__(self, handler, action_level=ERROR, buffer_size=0, pull_information=True, reset=False, filter=None, bubble=False): Handler.__init__(self, NOTSET, filter, bubble) self.lock = new_fine_grained_lock() self._level = action_level if isinstance(handler, Handler): self._handler = handler self._handler_factory = None else: self._handler = None self._handler_factory = handler #: the buffered records of the handler. Once the action is triggered #: (:attr:`triggered`) this list will be None. This attribute can #: be helpful for the handler factory function to select a proper #: filename (for example time of first log record) self.buffered_records = deque() #: the maximum number of entries in the buffer. 
If this is exhausted #: the oldest entries will be discarded to make place for new ones self.buffer_size = buffer_size self._buffer_full = False self._pull_information = pull_information self._action_triggered = False self._reset = reset def close(self): if self._handler is not None: self._handler.close() def enqueue(self, record): if self._pull_information: record.pull_information() if self._action_triggered: self._handler.emit(record) else: self.buffered_records.append(record) if self._buffer_full: self.buffered_records.popleft() elif (self.buffer_size and len(self.buffered_records) >= self.buffer_size): self._buffer_full = True return record.level >= self._level return False def rollover(self, record): if self._handler is None: self._handler = self._handler_factory(record, self) self._handler.emit_batch(iter(self.buffered_records), 'escalation') self.buffered_records.clear() self._action_triggered = not self._reset @property def triggered(self): """This attribute is `True` when the action was triggered. From this point onwards the finger crossed handler transparently forwards all log records to the inner handler. If the handler resets itself this will always be `False`. """ return self._action_triggered def emit(self, record): self.lock.acquire() try: if self.enqueue(record): self.rollover(record) finally: self.lock.release() class GroupHandler(WrapperHandler): """A handler that buffers all messages until it is popped again and then forwards all messages to another handler. This is useful if you for example have an application that does computations and only a result mail is required. A group handler makes sure that only one mail is sent and not multiple. Some other handles might support this as well, though currently none of the builtins do. Example:: with GroupHandler(MailHandler(...)): # everything here ends up in the mail The :class:`GroupHandler` is implemented as a :class:`WrapperHandler` thus forwarding all attributes of the wrapper handler. 
Notice that this handler really only emit the records when the handler is popped from the stack. .. versionadded:: 0.3 """ _direct_attrs = frozenset(['handler', 'pull_information', 'buffered_records']) def __init__(self, handler, pull_information=True): WrapperHandler.__init__(self, handler) self.pull_information = pull_information self.buffered_records = [] def rollover(self): self.handler.emit_batch(self.buffered_records, 'group') self.buffered_records = [] def pop_application(self): Handler.pop_application(self) self.rollover() def pop_thread(self): Handler.pop_thread(self) self.rollover() def pop_context(self): Handler.pop_context(self) self.rollover() def pop_greenlet(self): Handler.pop_greenlet(self) self.rollover() def emit(self, record): if self.pull_information: record.pull_information() self.buffered_records.append(record) logbook-1.5.3/logbook/helpers.py000066400000000000000000000203001355165376200166340ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ logbook.helpers ~~~~~~~~~~~~~~~ Various helper functions :copyright: (c) 2010 by Armin Ronacher, Georg Brandl. :license: BSD, see LICENSE for more details. 
""" import os import re import sys import errno import time import random from datetime import datetime, timedelta PY2 = sys.version_info[0] == 2 if PY2: import __builtin__ as _builtins import collections as collections_abc else: import builtins as _builtins import collections.abc as collections_abc try: import json except ImportError: import simplejson as json if PY2: from cStringIO import StringIO iteritems = dict.iteritems from itertools import izip as zip xrange = _builtins.xrange else: from io import StringIO zip = _builtins.zip xrange = range iteritems = dict.items _IDENTITY = lambda obj: obj if PY2: def u(s): return unicode(s, "unicode_escape") else: u = _IDENTITY if PY2: integer_types = (int, long) string_types = (basestring,) else: integer_types = (int,) string_types = (str,) if PY2: import httplib as http_client else: from http import client as http_client if PY2: # Yucky, but apparently that's the only way to do this exec(""" def reraise(tp, value, tb=None): raise tp, value, tb """, locals(), globals()) else: def reraise(tp, value, tb=None): if value.__traceback__ is not tb: raise value.with_traceback(tb) raise value # this regexp also matches incompatible dates like 20070101 because # some libraries (like the python xmlrpclib modules) use this _iso8601_re = re.compile( # date r'(\d{4})(?:-?(\d{2})(?:-?(\d{2}))?)?' 
# time r'(?:T(\d{2}):(\d{2})(?::(\d{2}(?:\.\d+)?))?(Z|[+-]\d{2}:\d{2})?)?$' ) _missing = object() if PY2: def b(x): return x def _is_text_stream(x): return True else: import io def b(x): return x.encode('ascii') def _is_text_stream(stream): return isinstance(stream, io.TextIOBase) can_rename_open_file = False if os.name == 'nt': try: import ctypes _MOVEFILE_REPLACE_EXISTING = 0x1 _MOVEFILE_WRITE_THROUGH = 0x8 _MoveFileEx = ctypes.windll.kernel32.MoveFileExW def _rename(src, dst): if PY2: if not isinstance(src, unicode): src = unicode(src, sys.getfilesystemencoding()) if not isinstance(dst, unicode): dst = unicode(dst, sys.getfilesystemencoding()) if _rename_atomic(src, dst): return True retry = 0 rv = False while not rv and retry < 100: rv = _MoveFileEx(src, dst, _MOVEFILE_REPLACE_EXISTING | _MOVEFILE_WRITE_THROUGH) if not rv: time.sleep(0.001) retry += 1 return rv # new in Vista and Windows Server 2008 _CreateTransaction = ctypes.windll.ktmw32.CreateTransaction _CommitTransaction = ctypes.windll.ktmw32.CommitTransaction _MoveFileTransacted = ctypes.windll.kernel32.MoveFileTransactedW _CloseHandle = ctypes.windll.kernel32.CloseHandle can_rename_open_file = True def _rename_atomic(src, dst): ta = _CreateTransaction(None, 0, 0, 0, 0, 1000, 'Logbook rename') if ta == -1: return False try: retry = 0 rv = False while not rv and retry < 100: rv = _MoveFileTransacted(src, dst, None, None, _MOVEFILE_REPLACE_EXISTING | _MOVEFILE_WRITE_THROUGH, ta) if rv: rv = _CommitTransaction(ta) break else: time.sleep(0.001) retry += 1 return rv finally: _CloseHandle(ta) except Exception: def _rename(src, dst): return False def _rename_atomic(src, dst): return False def rename(src, dst): # Try atomic or pseudo-atomic rename if _rename(src, dst): return # Fall back to "move away and replace" try: os.rename(src, dst) except OSError: e = sys.exc_info()[1] if e.errno not in (errno.EEXIST, errno.EACCES): raise old = "%s-%08x" % (dst, random.randint(0, 2 ** 31 - 1)) os.rename(dst, old) 
os.rename(src, dst) try: os.unlink(old) except Exception: pass else: rename = os.rename can_rename_open_file = True _JSON_SIMPLE_TYPES = (bool, float) + integer_types + string_types def to_safe_json(data): """Makes a data structure safe for JSON silently discarding invalid objects from nested structures. This also converts dates. """ def _convert(obj): if obj is None: return None elif PY2 and isinstance(obj, str): return obj.decode('utf-8', 'replace') elif isinstance(obj, _JSON_SIMPLE_TYPES): return obj elif isinstance(obj, datetime): return format_iso8601(obj) elif isinstance(obj, list): return [_convert(x) for x in obj] elif isinstance(obj, tuple): return tuple(_convert(x) for x in obj) elif isinstance(obj, dict): rv = {} for key, value in iteritems(obj): if not isinstance(key, string_types): key = str(key) if not is_unicode(key): key = u(key) rv[key] = _convert(value) return rv return _convert(data) def format_iso8601(d=None): """Returns a date in iso8601 format.""" if d is None: d = datetime.utcnow() rv = d.strftime('%Y-%m-%dT%H:%M:%S') if d.microsecond: rv += '.' + str(d.microsecond) return rv + 'Z' def parse_iso8601(value): """Parse an iso8601 date into a datetime object. The timezone is normalized to UTC. """ m = _iso8601_re.match(value) if m is None: raise ValueError('not a valid iso8601 date value') groups = m.groups() args = [] for group in groups[:-2]: if group is not None: group = int(group) args.append(group) seconds = groups[-2] if seconds is not None: if '.' 
in seconds: sec, usec = seconds.split('.') args.append(int(sec)) args.append(int(usec.ljust(6, '0'))) else: args.append(int(seconds)) rv = datetime(*args) tz = groups[-1] if tz and tz != 'Z': args = [int(x) for x in tz[1:].split(':')] delta = timedelta(hours=args[0], minutes=args[1]) if tz[0] == '+': rv -= delta else: rv += delta return rv def get_application_name(): if not sys.argv or not sys.argv[0]: return 'Python' return os.path.basename(sys.argv[0]).title() class cached_property(object): """A property that is lazily calculated and then cached.""" def __init__(self, func, name=None, doc=None): self.__name__ = name or func.__name__ self.__module__ = func.__module__ self.__doc__ = doc or func.__doc__ self.func = func def __get__(self, obj, type=None): if obj is None: return self value = obj.__dict__.get(self.__name__, _missing) if value is _missing: value = self.func(obj) obj.__dict__[self.__name__] = value return value def get_iterator_next_method(it): return lambda: next(it) # python 2 support functions and aliases def is_unicode(x): if PY2: return isinstance(x, unicode) return isinstance(x, str) if PY2: exec("""def with_metaclass(meta): class _WithMetaclassBase(object): __metaclass__ = meta return _WithMetaclassBase """) else: exec("""def with_metaclass(meta): class _WithMetaclassBase(object, metaclass=meta): pass return _WithMetaclassBase """) logbook-1.5.3/logbook/more.py000066400000000000000000000465001355165376200161460ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ logbook.more ~~~~~~~~~~~~ Fancy stuff for logbook. :copyright: (c) 2010 by Armin Ronacher, Georg Brandl. :license: BSD, see LICENSE for more details. 
""" import re import os import platform from collections import defaultdict from functools import partial from logbook.base import ( RecordDispatcher, dispatch_record, NOTSET, ERROR, NOTICE) from logbook.handlers import ( Handler, StringFormatter, StringFormatterHandlerMixin, StderrHandler) from logbook._termcolors import colorize from logbook.helpers import PY2, string_types, iteritems, u from logbook.ticketing import TicketingHandler as DatabaseHandler from logbook.ticketing import BackendBase try: import riemann_client.client import riemann_client.transport except ImportError: riemann_client = None #from riemann_client.transport import TCPTransport, UDPTransport, BlankTransport if PY2: from urllib import urlencode from urlparse import parse_qsl else: from urllib.parse import parse_qsl, urlencode _ws_re = re.compile(r'(\s+)', re.UNICODE) TWITTER_FORMAT_STRING = u( '[{record.channel}] {record.level_name}: {record.message}') TWITTER_ACCESS_TOKEN_URL = 'https://twitter.com/oauth/access_token' NEW_TWEET_URL = 'https://api.twitter.com/1/statuses/update.json' class CouchDBBackend(BackendBase): """Implements a backend that writes into a CouchDB database. """ def setup_backend(self): from couchdb import Server uri = self.options.pop('uri', u('')) couch = Server(uri) db_name = self.options.pop('db') self.database = couch[db_name] def record_ticket(self, record, data, hash, app_id): """Records a log record as ticket. """ db = self.database ticket = record.to_dict() ticket["time"] = ticket["time"].isoformat() + "Z" ticket_id, _ = db.save(ticket) db.save(ticket) class TwitterFormatter(StringFormatter): """Works like the standard string formatter and is used by the :class:`TwitterHandler` unless changed. 
""" max_length = 140 def format_exception(self, record): return u('%s: %s') % (record.exception_shortname, record.exception_message) def __call__(self, record, handler): formatted = StringFormatter.__call__(self, record, handler) rv = [] length = 0 for piece in _ws_re.split(formatted): length += len(piece) if length > self.max_length: if length - len(piece) < self.max_length: rv.append(u('…')) break rv.append(piece) return u('').join(rv) class TaggingLogger(RecordDispatcher): """A logger that attaches a tag to each record. This is an alternative record dispatcher that does not use levels but tags to keep log records apart. It is constructed with a descriptive name and at least one tag. The tags are up for you to define:: logger = TaggingLogger('My Logger', ['info', 'warning']) For each tag defined that way, a method appears on the logger with that name:: logger.info('This is a info message') To dispatch to different handlers based on tags you can use the :class:`TaggingHandler`. The tags themselves are stored as list named ``'tags'`` in the :attr:`~logbook.LogRecord.extra` dictionary. """ def __init__(self, name=None, tags=None): RecordDispatcher.__init__(self, name) # create a method for each tag named for tag in (tags or ()): setattr(self, tag, partial(self.log, tag)) def log(self, tags, msg, *args, **kwargs): if isinstance(tags, string_types): tags = [tags] exc_info = kwargs.pop('exc_info', None) extra = kwargs.pop('extra', {}) extra['tags'] = list(tags) frame_correction = kwargs.pop('frame_correction', 0) return self.make_record_and_handle(NOTSET, msg, args, kwargs, exc_info, extra, frame_correction) class TaggingHandler(Handler): """A handler that logs for tags and dispatches based on those. 
Example:: import logbook from logbook.more import TaggingHandler handler = TaggingHandler(dict( info=OneHandler(), warning=AnotherHandler() )) """ def __init__(self, handlers, filter=None, bubble=False): Handler.__init__(self, NOTSET, filter, bubble) assert isinstance(handlers, dict) self._handlers = dict( (tag, isinstance(handler, Handler) and [handler] or handler) for (tag, handler) in iteritems(handlers)) def emit(self, record): for tag in record.extra.get('tags', ()): for handler in self._handlers.get(tag, ()): handler.handle(record) class TwitterHandler(Handler, StringFormatterHandlerMixin): """A handler that logs to twitter. Requires that you sign up an application on twitter and request xauth support. Furthermore the oauth2 library has to be installed. If you don't want to register your own application and request xauth credentials, there are a couple of leaked consumer key and secret pairs from application explicitly whitelisted at Twitter (`leaked secrets `_). """ default_format_string = TWITTER_FORMAT_STRING formatter_class = TwitterFormatter def __init__(self, consumer_key, consumer_secret, username, password, level=NOTSET, format_string=None, filter=None, bubble=False): Handler.__init__(self, level, filter, bubble) StringFormatterHandlerMixin.__init__(self, format_string) self.consumer_key = consumer_key self.consumer_secret = consumer_secret self.username = username self.password = password try: import oauth2 except ImportError: raise RuntimeError('The python-oauth2 library is required for ' 'the TwitterHandler.') self._oauth = oauth2 self._oauth_token = None self._oauth_token_secret = None self._consumer = oauth2.Consumer(consumer_key, consumer_secret) self._client = oauth2.Client(self._consumer) def get_oauth_token(self): """Returns the oauth access token.""" if self._oauth_token is None: resp, content = self._client.request( TWITTER_ACCESS_TOKEN_URL + '?', 'POST', body=urlencode({ 'x_auth_username': self.username.encode('utf-8'), 'x_auth_password': 
self.password.encode('utf-8'), 'x_auth_mode': 'client_auth' }), headers={'Content-Type': 'application/x-www-form-urlencoded'} ) if resp['status'] != '200': raise RuntimeError('unable to login to Twitter') data = dict(parse_qsl(content)) self._oauth_token = data['oauth_token'] self._oauth_token_secret = data['oauth_token_secret'] return self._oauth.Token(self._oauth_token, self._oauth_token_secret) def make_client(self): """Creates a new oauth client auth a new access token.""" return self._oauth.Client(self._consumer, self.get_oauth_token()) def tweet(self, status): """Tweets a given status. Status must not exceed 140 chars.""" client = self.make_client() resp, content = client.request( NEW_TWEET_URL, 'POST', body=urlencode({'status': status.encode('utf-8')}), headers={'Content-Type': 'application/x-www-form-urlencoded'}) return resp['status'] == '200' def emit(self, record): self.tweet(self.format(record)) class SlackHandler(Handler, StringFormatterHandlerMixin): """A handler that logs to slack. Requires that you sign up an application on slack and request an api token. Furthermore the slacker library has to be installed. """ def __init__(self, api_token, channel, level=NOTSET, format_string=None, filter=None, bubble=False): Handler.__init__(self, level, filter, bubble) StringFormatterHandlerMixin.__init__(self, format_string) self.api_token = api_token try: from slacker import Slacker except ImportError: raise RuntimeError('The slacker library is required for ' 'the SlackHandler.') self.channel = channel self.slack = Slacker(api_token) def emit(self, record): self.slack.chat.post_message(channel=self.channel, text=self.format(record)) class JinjaFormatter(object): """A formatter object that makes it easy to format using a Jinja 2 template instead of a format string. 
""" def __init__(self, template): try: from jinja2 import Template except ImportError: raise RuntimeError('The jinja2 library is required for ' 'the JinjaFormatter.') self.template = Template(template) def __call__(self, record, handler): return self.template.render(record=record, handler=handler) class ExternalApplicationHandler(Handler): """This handler invokes an external application to send parts of the log record to. The constructor takes a list of arguments that are passed to another application where each of the arguments is a format string, and optionally a format string for data that is passed to stdin. For example it can be used to invoke the ``say`` command on OS X:: from logbook.more import ExternalApplicationHandler say_handler = ExternalApplicationHandler(['say', '{record.message}']) Note that the above example is blocking until ``say`` finished, so it's recommended to combine this handler with the :class:`logbook.ThreadedWrapperHandler` to move the execution into a background thread. .. versionadded:: 0.3 """ def __init__(self, arguments, stdin_format=None, encoding='utf-8', level=NOTSET, filter=None, bubble=False): Handler.__init__(self, level, filter, bubble) self.encoding = encoding self._arguments = list(arguments) if stdin_format is not None: stdin_format = stdin_format self._stdin_format = stdin_format import subprocess self._subprocess = subprocess def emit(self, record): args = [arg.format(record=record) for arg in self._arguments] if self._stdin_format is not None: stdin_data = (self._stdin_format.format(record=record) .encode(self.encoding)) stdin = self._subprocess.PIPE else: stdin = None c = self._subprocess.Popen(args, stdin=stdin) if stdin is not None: c.communicate(stdin_data) c.wait() class ColorizingStreamHandlerMixin(object): """A mixin class that does colorizing. .. versionadded:: 0.3 .. versionchanged:: 1.0.0 Added Windows support if `colorama`_ is installed. .. 
_`colorama`: https://pypi.org/pypi/colorama """ _use_color = None def force_color(self): """Force colorizing the stream (`should_colorize` will return True) """ self._use_color = True def forbid_color(self): """Forbid colorizing the stream (`should_colorize` will return False) """ self._use_color = False def should_colorize(self, record): """Returns `True` if colorizing should be applied to this record. The default implementation returns `True` if the stream is a tty. If we are executing on Windows, colorama must be installed. """ if os.name == 'nt': try: import colorama except ImportError: return False if self._use_color is not None: return self._use_color isatty = getattr(self.stream, 'isatty', None) return isatty and isatty() def get_color(self, record): """Returns the color for this record.""" if record.level >= ERROR: return 'red' elif record.level >= NOTICE: return 'yellow' return 'lightgray' def format(self, record): rv = super(ColorizingStreamHandlerMixin, self).format(record) if self.should_colorize(record): color = self.get_color(record) if color: rv = colorize(color, rv) return rv class ColorizedStderrHandler(ColorizingStreamHandlerMixin, StderrHandler): """A colorizing stream handler that writes to stderr. It will only colorize if a terminal was detected. Note that this handler does not colorize on Windows systems. .. versionadded:: 0.3 .. versionchanged:: 1.0 Added Windows support if `colorama`_ is installed. .. _`colorama`: https://pypi.org/pypi/colorama """ def __init__(self, *args, **kwargs): StderrHandler.__init__(self, *args, **kwargs) # Try import colorama so that we work on Windows. colorama.init is a # noop on other operating systems. try: import colorama except ImportError: pass else: colorama.init() # backwards compat. 
Should go away in some future releases from logbook.handlers import ( FingersCrossedHandler as FingersCrossedHandlerBase) class FingersCrossedHandler(FingersCrossedHandlerBase): def __init__(self, *args, **kwargs): FingersCrossedHandlerBase.__init__(self, *args, **kwargs) from warnings import warn warn(PendingDeprecationWarning('fingers crossed handler changed ' 'location. It\'s now a core component of Logbook.')) class ExceptionHandler(Handler, StringFormatterHandlerMixin): """An exception handler which raises exceptions of the given `exc_type`. This is especially useful if you set a specific error `level` e.g. to treat warnings as exceptions:: from logbook.more import ExceptionHandler class ApplicationWarning(Exception): pass exc_handler = ExceptionHandler(ApplicationWarning, level='WARNING') .. versionadded:: 0.3 """ def __init__(self, exc_type, level=NOTSET, format_string=None, filter=None, bubble=False): Handler.__init__(self, level, filter, bubble) StringFormatterHandlerMixin.__init__(self, format_string) self.exc_type = exc_type def handle(self, record): if self.should_handle(record): raise self.exc_type(self.format(record)) return False class DedupHandler(Handler): """A handler that deduplicates log messages. It emits each unique log record once, along with the number of times it was emitted. 
Example::: with logbook.more.DedupHandler(): logbook.error('foo') logbook.error('bar') logbook.error('foo') The expected output::: message repeated 2 times: foo message repeated 1 times: bar """ def __init__(self, format_string='message repeated {count} times: {message}', *args, **kwargs): Handler.__init__(self, bubble=False, *args, **kwargs) self._format_string = format_string self.clear() def clear(self): self._message_to_count = defaultdict(int) self._unique_ordered_records = [] def pop_application(self): Handler.pop_application(self) self.flush() def pop_thread(self): Handler.pop_thread(self) self.flush() def pop_context(self): Handler.pop_context(self) self.flush() def pop_greenlet(self): Handler.pop_greenlet(self) self.flush() def handle(self, record): if record.message not in self._message_to_count: self._unique_ordered_records.append(record) self._message_to_count[record.message] += 1 return True def flush(self): for record in self._unique_ordered_records: record.message = self._format_string.format( message=record.message, count=self._message_to_count[record.message]) # record.dispatcher is the logger who created the message, # it's sometimes supressed (by logbook.info for example) if record.dispatcher is not None: dispatch = record.dispatcher.call_handlers else: dispatch = dispatch_record dispatch(record) self.clear() class RiemannHandler(Handler): """ A handler that sends logs as events to Riemann. """ def __init__(self, host, port, message_type="tcp", ttl=60, flush_threshold=10, bubble=False, filter=None, level=NOTSET): """ :param host: riemann host :param port: riemann port :param message_type: selects transport. 
Currently available 'tcp' and 'udp' :param ttl: defines time to live in riemann :param flush_threshold: count of events after which we send to riemann """ if riemann_client is None: raise NotImplementedError("The Riemann handler requires the riemann_client package") # pragma: no cover Handler.__init__(self, level, filter, bubble) self.host = host self.port = port self.ttl = ttl self.queue = [] self.flush_threshold = flush_threshold if message_type == "tcp": self.transport = riemann_client.transport.TCPTransport elif message_type == "udp": self.transport = riemann_client.transport.UDPTransport elif message_type == "test": self.transport = riemann_client.transport.BlankTransport else: msg = ("Currently supported message types for RiemannHandler are: {0}. \ {1} is not supported." .format(",".join(["tcp", "udp", "test"]), message_type)) raise RuntimeError(msg) def record_to_event(self, record): from time import time tags = ["log", record.level_name] msg = str(record.exc_info[1]) if record.exc_info else record.msg channel_name = str(record.channel) if record.channel else "unknown" if any([record.level_name == keywords for keywords in ["ERROR", "EXCEPTION"]]): state = "error" else: state = "ok" return {"metric_f": 1.0, "tags": tags, "description": msg, "time": int(time()), "ttl": self.ttl, "host": platform.node(), "service": "{0}.{1}".format(channel_name, os.getpid()), "state": state } def _flush_events(self): with riemann_client.client.QueuedClient(self.transport(self.host, self.port)) as cl: for event in self.queue: cl.event(**event) cl.flush() self.queue = [] def emit(self, record): self.queue.append(self.record_to_event(record)) if len(self.queue) == self.flush_threshold: self._flush_events() logbook-1.5.3/logbook/notifiers.py000066400000000000000000000274031355165376200172070ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ logbook.notifiers ~~~~~~~~~~~~~~~~~ System notify handlers for OSX and Linux. :copyright: (c) 2010 by Armin Ronacher, Christopher Grebs. 
:license: BSD, see LICENSE for more details. """ import os import sys import base64 from time import time from logbook.base import NOTSET, ERROR, WARNING from logbook.handlers import Handler, LimitingHandlerMixin from logbook.helpers import get_application_name, PY2, http_client, u if PY2: from urllib import urlencode else: from urllib.parse import urlencode def create_notification_handler(application_name=None, level=NOTSET, icon=None): """Creates a handler perfectly fit the current platform. On Linux systems this creates a :class:`LibNotifyHandler`, on OS X systems it will create a :class:`GrowlHandler`. """ if sys.platform == 'darwin': return GrowlHandler(application_name, level=level, icon=icon) return LibNotifyHandler(application_name, level=level, icon=icon) class NotificationBaseHandler(Handler, LimitingHandlerMixin): """Baseclass for notification handlers.""" def __init__(self, application_name=None, record_limit=None, record_delta=None, level=NOTSET, filter=None, bubble=False): Handler.__init__(self, level, filter, bubble) LimitingHandlerMixin.__init__(self, record_limit, record_delta) if application_name is None: application_name = get_application_name() self.application_name = application_name def make_title(self, record): """Called to get the title from the record.""" return u('%s: %s') % (record.channel, record.level_name.title()) def make_text(self, record): """Called to get the text of the record.""" return record.message class GrowlHandler(NotificationBaseHandler): """A handler that dispatches to Growl. Requires that either growl-py or py-Growl are installed. 
""" def __init__(self, application_name=None, icon=None, host=None, password=None, record_limit=None, record_delta=None, level=NOTSET, filter=None, bubble=False): NotificationBaseHandler.__init__(self, application_name, record_limit, record_delta, level, filter, bubble) # growl is using the deprecated md5 module, but we really don't need # to see that deprecation warning from warnings import filterwarnings filterwarnings(module='Growl', category=DeprecationWarning, action='ignore') try: import Growl self._growl = Growl except ImportError: raise RuntimeError('The growl module is not available. You have ' 'to install either growl-py or py-Growl to ' 'use the GrowlHandler.') if icon is not None: if not os.path.isfile(icon): raise IOError('Filename to an icon expected.') icon = self._growl.Image.imageFromPath(icon) else: try: icon = self._growl.Image.imageWithIconForCurrentApplication() except TypeError: icon = None self._notifier = self._growl.GrowlNotifier( applicationName=self.application_name, applicationIcon=icon, notifications=['Notset', 'Debug', 'Info', 'Notice', 'Warning', 'Error', 'Critical'], hostname=host, password=password ) self._notifier.register() def is_sticky(self, record): """Returns `True` if the sticky flag should be set for this record. The default implementation marks errors and criticals sticky. """ return record.level >= ERROR def get_priority(self, record): """Returns the priority flag for Growl. Errors and criticals are get highest priority (2), warnings get higher priority (1) and the rest gets 0. Growl allows values between -2 and 2. """ if record.level >= ERROR: return 2 elif record.level == WARNING: return 1 return 0 def emit(self, record): if not self.check_delivery(record)[1]: return self._notifier.notify(record.level_name.title(), self.make_title(record), self.make_text(record), sticky=self.is_sticky(record), priority=self.get_priority(record)) class LibNotifyHandler(NotificationBaseHandler): """A handler that dispatches to libnotify. 
Requires pynotify installed. If `no_init` is set to `True` the initialization of libnotify is skipped. """ def __init__(self, application_name=None, icon=None, no_init=False, record_limit=None, record_delta=None, level=NOTSET, filter=None, bubble=False): NotificationBaseHandler.__init__(self, application_name, record_limit, record_delta, level, filter, bubble) try: import pynotify self._pynotify = pynotify except ImportError: raise RuntimeError('The pynotify library is required for ' 'the LibNotifyHandler.') self.icon = icon if not no_init: pynotify.init(self.application_name) def set_notifier_icon(self, notifier, icon): """Used to attach an icon on a notifier object.""" try: from gtk import gdk except ImportError: # TODO: raise a warning? raise RuntimeError('The gtk.gdk module is required to set an icon.') if icon is not None: if not isinstance(icon, gdk.Pixbuf): icon = gdk.pixbuf_new_from_file(icon) notifier.set_icon_from_pixbuf(icon) def get_expires(self, record): """Returns either EXPIRES_DEFAULT or EXPIRES_NEVER for this record. The default implementation marks errors and criticals as EXPIRES_NEVER. """ pn = self._pynotify return pn.EXPIRES_NEVER if record.level >= ERROR else pn.EXPIRES_DEFAULT def get_urgency(self, record): """Returns the urgency flag for pynotify. Errors and criticals are get highest urgency (CRITICAL), warnings get higher priority (NORMAL) and the rest gets LOW. """ pn = self._pynotify if record.level >= ERROR: return pn.URGENCY_CRITICAL elif record.level == WARNING: return pn.URGENCY_NORMAL return pn.URGENCY_LOW def emit(self, record): if not self.check_delivery(record)[1]: return notifier = self._pynotify.Notification(self.make_title(record), self.make_text(record)) notifier.set_urgency(self.get_urgency(record)) notifier.set_timeout(self.get_expires(record)) self.set_notifier_icon(notifier, self.icon) notifier.show() class BoxcarHandler(NotificationBaseHandler): """Sends notifications to boxcar.io. 
Can be forwarded to your iPhone or other compatible device. """ api_url = 'https://boxcar.io/notifications/' def __init__(self, email, password, record_limit=None, record_delta=None, level=NOTSET, filter=None, bubble=False): NotificationBaseHandler.__init__(self, None, record_limit, record_delta, level, filter, bubble) self.email = email self.password = password def get_screen_name(self, record): """Returns the value of the screen name field.""" return record.level_name.title() def emit(self, record): if not self.check_delivery(record)[1]: return body = urlencode({ 'notification[from_screen_name]': self.get_screen_name(record).encode('utf-8'), 'notification[message]': self.make_text(record).encode('utf-8'), 'notification[from_remote_service_id]': str(int(time() * 100)) }) con = http_client.HTTPSConnection('boxcar.io') con.request('POST', '/notifications/', headers={ 'Authorization': 'Basic ' + base64.b64encode((u('%s:%s') % (self.email, self.password)) .encode('utf-8')).strip(), }, body=body) con.close() class NotifoHandler(NotificationBaseHandler): """Sends notifications to notifo.com. Can be forwarded to your Desktop, iPhone, or other compatible device. """ def __init__(self, application_name=None, username=None, secret=None, record_limit=None, record_delta=None, level=NOTSET, filter=None, bubble=False, hide_level=False): try: import notifo except ImportError: raise RuntimeError( 'The notifo module is not available. You have ' 'to install notifo to use the NotifoHandler.' 
) NotificationBaseHandler.__init__(self, None, record_limit, record_delta, level, filter, bubble) self._notifo = notifo self.application_name = application_name self.username = username self.secret = secret self.hide_level = hide_level def emit(self, record): if self.hide_level: _level_name = None else: _level_name = self.level_name self._notifo.send_notification(self.username, self.secret, None, record.message, self.application_name, _level_name, None) class PushoverHandler(NotificationBaseHandler): """Sends notifications to pushover.net. Can be forwarded to your Desktop, iPhone, or other compatible device. If `priority` is not one of -2, -1, 0, or 1, it is set to 0 automatically. """ def __init__(self, application_name=None, apikey=None, userkey=None, device=None, priority=0, sound=None, record_limit=None, record_delta=None, level=NOTSET, filter=None, bubble=False, max_title_len=100, max_message_len=512): super(PushoverHandler, self).__init__(None, record_limit, record_delta, level, filter, bubble) self.application_name = application_name self.apikey = apikey self.userkey = userkey self.device = device self.priority = priority self.sound = sound self.max_title_len = max_title_len self.max_message_len = max_message_len if self.application_name is None: self.title = None else: self.title = self._crop(self.application_name, self.max_title_len) if self.priority not in [-2, -1, 0, 1]: self.priority = 0 def _crop(self, msg, max_len): if max_len is not None and max_len > 0 and len(msg) > max_len: return "%s..." 
% (msg[:max_len-3],) else: return msg def emit(self, record): message = self._crop(record.message, self.max_message_len) body_dict = { 'token': self.apikey, 'user': self.userkey, 'message': message, 'priority': self.priority } if self.title is not None: body_dict['title'] = self.title if self.device is not None: body_dict['device'] = self.device if self.sound is not None: body_dict['sound'] = self.sound body = urlencode(body_dict) con = http_client.HTTPSConnection('api.pushover.net') con.request('POST', '/1/messages.json', body=body) con.close() logbook-1.5.3/logbook/queues.py000066400000000000000000000601271355165376200165140ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ logbook.queues ~~~~~~~~~~~~~~ This module implements queue backends. :copyright: (c) 2010 by Armin Ronacher, Georg Brandl. :license: BSD, see LICENSE for more details. """ import json import threading from threading import Thread, Lock import platform from logbook.base import NOTSET, LogRecord, dispatch_record from logbook.handlers import Handler, WrapperHandler from logbook.helpers import PY2, u if PY2: from Queue import Empty, Full, Queue as ThreadQueue else: from queue import Empty, Full, Queue as ThreadQueue class RedisHandler(Handler): """A handler that sends log messages to a Redis instance. It publishes each record as json dump. Requires redis module. To receive such records you need to have a running instance of Redis. Example setup:: handler = RedisHandler('http://127.0.0.1', port='9200', key='redis') If your Redis instance is password protected, you can securely connect passing your password when creating a RedisHandler object. 
Example:: handler = RedisHandler(password='your_redis_password') More info about the default buffer size: wp.me/p3tYJu-3b """ def __init__(self, host='127.0.0.1', port=6379, key='redis', extra_fields=None, flush_threshold=128, flush_time=1, level=NOTSET, filter=None, password=False, bubble=True, context=None, push_method='rpush'): Handler.__init__(self, level, filter, bubble) try: import redis from redis import ResponseError except ImportError: raise RuntimeError('The redis library is required for ' 'the RedisHandler') self.redis = redis.Redis(host=host, port=port, password=password, decode_responses=True) try: self.redis.ping() except ResponseError: raise ResponseError( 'The password provided is apparently incorrect') self.key = key self.extra_fields = extra_fields or {} self.flush_threshold = flush_threshold self.queue = [] self.lock = Lock() self.push_method = push_method # Set up a thread that flushes the queue every specified seconds self._stop_event = threading.Event() self._flushing_t = threading.Thread(target=self._flush_task, args=(flush_time, self._stop_event)) self._flushing_t.daemon = True self._flushing_t.start() def _flush_task(self, time, stop_event): """Calls the method _flush_buffer every certain time. """ while not self._stop_event.isSet(): with self.lock: self._flush_buffer() self._stop_event.wait(time) def _flush_buffer(self): """Flushes the messaging queue into Redis. All values are pushed at once for the same key. The method rpush/lpush is defined by push_method argument """ if self.queue: getattr(self.redis, self.push_method)(self.key, *self.queue) self.queue = [] def disable_buffering(self): """Disables buffering. If called, every single message will be directly pushed to Redis. """ self._stop_event.set() self.flush_threshold = 1 def emit(self, record): """Emits a pair (key, value) to redis. The key is the one provided when creating the handler, or redis if none was provided. The value contains both the message and the hostname. 
Extra values are also appended to the message. """ with self.lock: r = {"message": record.msg, "host": platform.node(), "level": record.level_name, "time": record.time.isoformat()} r.update(self.extra_fields) r.update(record.kwargs) self.queue.append(json.dumps(r)) if len(self.queue) == self.flush_threshold: self._flush_buffer() def close(self): self._flush_buffer() class MessageQueueHandler(Handler): """A handler that acts as a message queue publisher, which publishes each record as json dump. Requires the kombu module. The queue will be filled with JSON exported log records. To receive such log records from a queue you can use the :class:`MessageQueueSubscriber`. For an AMQP backend such as RabbitMQ:: handler = MessageQueueHandler('amqp://guest:guest@localhost//') This requires the py-amqp or the librabbitmq client library. For Redis (requires redis client library):: handler = MessageQueueHandler('redis://localhost:8889/0') For MongoDB (requires pymongo):: handler = MessageQueueHandler('mongodb://localhost:27017/logging') Several other backends are also supported. Refer to the `kombu`_ documentation .. _kombu: http://kombu.readthedocs.org/en/latest/introduction.html """ def __init__(self, uri=None, queue='logging', level=NOTSET, filter=None, bubble=False): Handler.__init__(self, level, filter, bubble) try: import kombu except ImportError: raise RuntimeError('The kombu library is required for ' 'the RabbitMQSubscriber.') if uri: connection = kombu.Connection(uri) self.queue = connection.SimpleQueue(queue) def export_record(self, record): """Exports the record into a dictionary ready for JSON dumping. """ return record.to_dict(json_safe=True) def emit(self, record): self.queue.put(self.export_record(record)) def close(self): self.queue.close() RabbitMQHandler = MessageQueueHandler class ZeroMQHandler(Handler): """A handler that acts as a ZeroMQ publisher, which publishes each record as json dump. Requires the pyzmq library. 
The queue will be filled with JSON exported log records. To receive such log records from a queue you can use the :class:`ZeroMQSubscriber`. If `multi` is set to `True`, the handler will use a `PUSH` socket to publish the records. This allows multiple handlers to use the same `uri`. The records can be received by using the :class:`ZeroMQSubscriber` with `multi` set to `True`. Example setup:: handler = ZeroMQHandler('tcp://127.0.0.1:5000') """ def __init__(self, uri=None, level=NOTSET, filter=None, bubble=False, context=None, multi=False): Handler.__init__(self, level, filter, bubble) try: import zmq except ImportError: raise RuntimeError('The pyzmq library is required for ' 'the ZeroMQHandler.') #: the zero mq context self.context = context or zmq.Context() if multi: #: the zero mq socket. self.socket = self.context.socket(zmq.PUSH) if uri is not None: self.socket.connect(uri) else: #: the zero mq socket. self.socket = self.context.socket(zmq.PUB) if uri is not None: self.socket.bind(uri) def export_record(self, record): """Exports the record into a dictionary ready for JSON dumping.""" return record.to_dict(json_safe=True) def emit(self, record): self.socket.send(json.dumps( self.export_record(record)).encode("utf-8")) def close(self, linger=-1): self.socket.close(linger) def __del__(self): # When the Handler is deleted we must close our socket in a # non-blocking fashion (using linger). # Otherwise it can block indefinitely, for example if the Subscriber is # not reachable. # If messages are pending on the socket, we wait 100ms for them to be # sent then we discard them. self.close(linger=100) class ThreadController(object): """A helper class used by queue subscribers to control the background thread. This is usually created and started in one go by :meth:`~logbook.queues.ZeroMQSubscriber.dispatch_in_background` or a comparable function. 
""" def __init__(self, subscriber, setup=None): self.setup = setup self.subscriber = subscriber self.running = False self._thread = None def start(self): """Starts the task thread.""" self.running = True self._thread = Thread(target=self._target) self._thread.setDaemon(True) self._thread.start() def stop(self): """Stops the task thread.""" if self.running: self.running = False self._thread.join() self._thread = None def _target(self): if self.setup is not None: self.setup.push_thread() try: while self.running: self.subscriber.dispatch_once(timeout=0.05) finally: if self.setup is not None: self.setup.pop_thread() class SubscriberBase(object): """Baseclass for all subscribers.""" def recv(self, timeout=None): """Receives a single record from the socket. Timeout of 0 means nonblocking, `None` means blocking and otherwise it's a timeout in seconds after which the function just returns with `None`. Subclasses have to override this. """ raise NotImplementedError() def dispatch_once(self, timeout=None): """Receives one record from the socket, loads it and dispatches it. Returns `True` if something was dispatched or `False` if it timed out. """ rv = self.recv(timeout) if rv is not None: dispatch_record(rv) return True return False def dispatch_forever(self): """Starts a loop that dispatches log records forever.""" while 1: self.dispatch_once() def dispatch_in_background(self, setup=None): """Starts a new daemonized thread that dispatches in the background. An optional handler setup can be provided that pushed to the new thread (can be any :class:`logbook.base.StackedObject`). Returns a :class:`ThreadController` object for shutting down the background thread. The background thread will already be running when this function returns. 
""" controller = ThreadController(self, setup) controller.start() return controller class MessageQueueSubscriber(SubscriberBase): """A helper that acts as a message queue subscriber and will dispatch received log records to the active handler setup. There are multiple ways to use this class. It can be used to receive log records from a queue:: subscriber = MessageQueueSubscriber('mongodb://localhost:27017/logging') record = subscriber.recv() But it can also be used to receive and dispatch these in one go:: with target_handler: subscriber = MessageQueueSubscriber('mongodb://localhost:27017/logging') subscriber.dispatch_forever() This will take all the log records from that queue and dispatch them over to `target_handler`. If you want you can also do that in the background:: subscriber = MessageQueueSubscriber('mongodb://localhost:27017/logging') controller = subscriber.dispatch_in_background(target_handler) The controller returned can be used to shut down the background thread:: controller.stop() """ def __init__(self, uri=None, queue='logging'): try: import kombu except ImportError: raise RuntimeError('The kombu library is required.') if uri: connection = kombu.Connection(uri) self.queue = connection.SimpleQueue(queue) def __del__(self): try: self.close() except AttributeError: # subscriber partially created pass def close(self): self.queue.close() def recv(self, timeout=None): """Receives a single record from the socket. Timeout of 0 means nonblocking, `None` means blocking and otherwise it's a timeout in seconds after which the function just returns with `None`. """ if timeout == 0: try: rv = self.queue.get(block=False) except Exception: return else: rv = self.queue.get(timeout=timeout) log_record = rv.payload rv.ack() return LogRecord.from_dict(log_record) RabbitMQSubscriber = MessageQueueSubscriber class ZeroMQSubscriber(SubscriberBase): """A helper that acts as ZeroMQ subscriber and will dispatch received log records to the active handler setup. 
There are multiple ways to use this class. It can be used to receive log records from a queue:: subscriber = ZeroMQSubscriber('tcp://127.0.0.1:5000') record = subscriber.recv() But it can also be used to receive and dispatch these in one go:: with target_handler: subscriber = ZeroMQSubscriber('tcp://127.0.0.1:5000') subscriber.dispatch_forever() This will take all the log records from that queue and dispatch them over to `target_handler`. If you want you can also do that in the background:: subscriber = ZeroMQSubscriber('tcp://127.0.0.1:5000') controller = subscriber.dispatch_in_background(target_handler) The controller returned can be used to shut down the background thread:: controller.stop() If `multi` is set to `True`, the subscriber will use a `PULL` socket and listen to records published by a `PUSH` socket (usually via a :class:`ZeroMQHandler` with `multi` set to `True`). This allows a single subscriber to dispatch multiple handlers. """ def __init__(self, uri=None, context=None, multi=False): try: import zmq except ImportError: raise RuntimeError('The pyzmq library is required for ' 'the ZeroMQSubscriber.') self._zmq = zmq #: the zero mq context self.context = context or zmq.Context() if multi: #: the zero mq socket. self.socket = self.context.socket(zmq.PULL) if uri is not None: self.socket.bind(uri) else: #: the zero mq socket. self.socket = self.context.socket(zmq.SUB) if uri is not None: self.socket.connect(uri) self.socket.setsockopt_unicode(zmq.SUBSCRIBE, u('')) def __del__(self): try: self.close() except AttributeError: # subscriber partially created pass def close(self): """Closes the zero mq socket.""" self.socket.close() def recv(self, timeout=None): """Receives a single record from the socket. Timeout of 0 means nonblocking, `None` means blocking and otherwise it's a timeout in seconds after which the function just returns with `None`. 
""" if timeout is None: rv = self.socket.recv() elif not timeout: rv = self.socket.recv(self._zmq.NOBLOCK) if rv is None: return else: if not self._zmq.select([self.socket], [], [], timeout)[0]: return rv = self.socket.recv(self._zmq.NOBLOCK) if not PY2: rv = rv.decode("utf-8") return LogRecord.from_dict(json.loads(rv)) def _fix_261_mplog(): """necessary for older python's to disable a broken monkeypatch in the logging module. See multiprocessing/util.py for the hasattr() check. At least in Python 2.6.1 the multiprocessing module is not imported by logging and as such the test in the util fails. """ import logging import multiprocessing logging.multiprocessing = multiprocessing class MultiProcessingHandler(Handler): """Implements a handler that dispatches over a queue to a different process. It is connected to a subscriber with a :class:`multiprocessing.Queue`:: from multiprocessing import Queue from logbook.queues import MultiProcessingHandler queue = Queue(-1) handler = MultiProcessingHandler(queue) """ def __init__(self, queue, level=NOTSET, filter=None, bubble=False): Handler.__init__(self, level, filter, bubble) self.queue = queue _fix_261_mplog() def emit(self, record): self.queue.put_nowait(record.to_dict(json_safe=True)) class MultiProcessingSubscriber(SubscriberBase): """Receives log records from the given multiprocessing queue and dispatches them to the active handler setup. Make sure to use the same queue for both handler and subscriber. Idaelly the queue is set up with maximum size (``-1``):: from multiprocessing import Queue queue = Queue(-1) It can be used to receive log records from a queue:: subscriber = MultiProcessingSubscriber(queue) record = subscriber.recv() But it can also be used to receive and dispatch these in one go:: with target_handler: subscriber = MultiProcessingSubscriber(queue) subscriber.dispatch_forever() This will take all the log records from that queue and dispatch them over to `target_handler`. 
If you want you can also do that in the background:: subscriber = MultiProcessingSubscriber(queue) controller = subscriber.dispatch_in_background(target_handler) The controller returned can be used to shut down the background thread:: controller.stop() If no queue is provided the subscriber will create one. This one can the be used by handlers:: subscriber = MultiProcessingSubscriber() handler = MultiProcessingHandler(subscriber.queue) """ def __init__(self, queue=None): if queue is None: from multiprocessing import Queue queue = Queue(-1) self.queue = queue _fix_261_mplog() def recv(self, timeout=None): if timeout is None: rv = self.queue.get() else: try: rv = self.queue.get(block=True, timeout=timeout) except Empty: return None return LogRecord.from_dict(rv) class ExecnetChannelHandler(Handler): """Implements a handler that dispatches over a execnet channel to a different process. """ def __init__(self, channel, level=NOTSET, filter=None, bubble=False): Handler.__init__(self, level, filter, bubble) self.channel = channel def emit(self, record): self.channel.send(record.to_dict(json_safe=True)) class ExecnetChannelSubscriber(SubscriberBase): """subscribes to a execnet channel""" def __init__(self, channel): self.channel = channel def recv(self, timeout=None): try: rv = self.channel.receive(timeout=timeout) except self.channel.RemoteError: # XXX: handle return None except (self.channel.TimeoutError, EOFError): return None else: return LogRecord.from_dict(rv) class TWHThreadController(object): """A very basic thread controller that pulls things in from a queue and sends it to a handler. Both queue and handler are taken from the passed :class:`ThreadedWrapperHandler`. 
""" class Command(object): stop = object() emit = object() emit_batch = object() def __init__(self, wrapper_handler): self.wrapper_handler = wrapper_handler self.running = False self._thread = None def start(self): """Starts the task thread.""" self.running = True self._thread = Thread(target=self._target) self._thread.setDaemon(True) self._thread.start() def stop(self): """Stops the task thread.""" if self.running: self.wrapper_handler.queue.put_nowait((self.Command.stop, )) self._thread.join() self._thread = None def _target(self): while 1: item = self.wrapper_handler.queue.get() command, data = item[0], item[1:] if command is self.Command.stop: self.running = False break elif command is self.Command.emit: (record, ) = data self.wrapper_handler.handler.emit(record) elif command is self.Command.emit_batch: record, reason = data self.wrapper_handler.handler.emit_batch(record, reason) class ThreadedWrapperHandler(WrapperHandler): """This handled uses a single background thread to dispatch log records to a specific other handler using an internal queue. The idea is that if you are using a handler that requires some time to hand off the log records (such as the mail handler) and would block your request, you can let Logbook do that in a background thread. The threaded wrapper handler will automatically adopt the methods and properties of the wrapped handler. 
All the values will be reflected: >>> twh = ThreadedWrapperHandler(TestHandler()) >>> from logbook import WARNING >>> twh.level_name = 'WARNING' >>> twh.handler.level_name 'WARNING' """ _direct_attrs = frozenset(['handler', 'queue', 'controller']) def __init__(self, handler, maxsize=0): WrapperHandler.__init__(self, handler) self.queue = ThreadQueue(maxsize) self.controller = TWHThreadController(self) self.controller.start() def close(self): self.controller.stop() self.handler.close() def emit(self, record): item = (TWHThreadController.Command.emit, record) try: self.queue.put_nowait(item) except Full: # silently drop pass def emit_batch(self, records, reason): item = (TWHThreadController.Command.emit_batch, records, reason) try: self.queue.put_nowait(item) except Full: # silently drop pass class GroupMember(ThreadController): def __init__(self, subscriber, queue): ThreadController.__init__(self, subscriber, None) self.queue = queue def _target(self): if self.setup is not None: self.setup.push_thread() try: while self.running: record = self.subscriber.recv() if record: try: self.queue.put(record, timeout=0.05) except Full: pass finally: if self.setup is not None: self.setup.pop_thread() class SubscriberGroup(SubscriberBase): """This is a subscriber which represents a group of subscribers. This is helpful if you are writing a server-like application which has "slaves". 
This way a user is easily able to view every log record which happened somewhere in the entire system without having to check every single slave:: subscribers = SubscriberGroup([ MultiProcessingSubscriber(queue), ZeroMQSubscriber('tcp://127.0.0.1:5000') ]) with target_handler: subscribers.dispatch_forever() """ def __init__(self, subscribers=None, queue_limit=10): self.members = [] self.queue = ThreadQueue(queue_limit) for subscriber in subscribers or []: self.add(subscriber) def add(self, subscriber): """Adds the given `subscriber` to the group.""" member = GroupMember(subscriber, self.queue) member.start() self.members.append(member) def recv(self, timeout=None): try: return self.queue.get(timeout=timeout) except Empty: return def stop(self): """Stops the group from internally recieving any more messages, once the internal queue is exhausted :meth:`recv` will always return `None`. """ for member in self.members: self.member.stop() logbook-1.5.3/logbook/ticketing.py000066400000000000000000000454371355165376200171750ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ logbook.ticketing ~~~~~~~~~~~~~~~~~ Implements long handlers that write to remote data stores and assign each logging message a ticket id. :copyright: (c) 2010 by Armin Ronacher, Georg Brandl. :license: BSD, see LICENSE for more details. 
""" from time import time import json from logbook.base import NOTSET, level_name_property, LogRecord from logbook.handlers import Handler, HashingHandlerMixin from logbook.helpers import cached_property, b, PY2, u class Ticket(object): """Represents a ticket from the database.""" level_name = level_name_property() def __init__(self, db, row): self.db = db self.__dict__.update(row) @cached_property def last_occurrence(self): """The last occurrence.""" rv = self.get_occurrences(limit=1) if rv: return rv[0] def get_occurrences(self, order_by='-time', limit=50, offset=0): """Returns the occurrences for this ticket.""" return self.db.get_occurrences(self.ticket_id, order_by, limit, offset) def solve(self): """Marks this ticket as solved.""" self.db.solve_ticket(self.ticket_id) self.solved = True def delete(self): """Deletes the ticket from the database.""" self.db.delete_ticket(self.ticket_id) # Silence DeprecationWarning __hash__ = None def __eq__(self, other): equal = True for key in self.__dict__.keys(): if getattr(self, key) != getattr(other, key): equal = False break return equal def __ne__(self, other): return not self.__eq__(other) class Occurrence(LogRecord): """Represents an occurrence of a ticket.""" def __init__(self, db, row): self.update_from_dict(json.loads(row['data'])) self.db = db self.time = row['time'] self.ticket_id = row['ticket_id'] self.occurrence_id = row['occurrence_id'] class BackendBase(object): """Provides an abstract interface to various databases.""" def __init__(self, **options): self.options = options self.setup_backend() def setup_backend(self): """Setup the database backend.""" raise NotImplementedError() def record_ticket(self, record, data, hash, app_id): """Records a log record as ticket.""" raise NotImplementedError() def count_tickets(self): """Returns the number of tickets.""" raise NotImplementedError() def get_tickets(self, order_by='-last_occurrence_time', limit=50, offset=0): """Selects tickets from the database.""" raise 
NotImplementedError() def solve_ticket(self, ticket_id): """Marks a ticket as solved.""" raise NotImplementedError() def delete_ticket(self, ticket_id): """Deletes a ticket from the database.""" raise NotImplementedError() def get_ticket(self, ticket_id): """Return a single ticket with all occurrences.""" raise NotImplementedError() def get_occurrences(self, ticket, order_by='-time', limit=50, offset=0): """Selects occurrences from the database for a ticket.""" raise NotImplementedError() class SQLAlchemyBackend(BackendBase): """Implements a backend that is writing into a database SQLAlchemy can interface. This backend takes some additional options: `table_prefix` an optional table prefix for all tables created by the logbook ticketing handler. `metadata` an optional SQLAlchemy metadata object for the table creation. `autocreate_tables` can be set to `False` to disable the automatic creation of the logbook tables. """ def setup_backend(self): from sqlalchemy import create_engine, MetaData from sqlalchemy.orm import sessionmaker, scoped_session engine_or_uri = self.options.pop('uri', None) metadata = self.options.pop('metadata', None) table_prefix = self.options.pop('table_prefix', 'logbook_') if hasattr(engine_or_uri, 'execute'): self.engine = engine_or_uri else: # Pool recycle keeps connections from going stale, # which happens in MySQL Databases # Pool size is more custom for out stack self.engine = create_engine(engine_or_uri, convert_unicode=True, pool_recycle=360, pool_size=1000) # Create session factory using session maker session = sessionmaker() # Bind to the engined session.configure(bind=self.engine) # Scoped session is a thread safe solution for # interaction with the Database self.session = scoped_session(session) if metadata is None: metadata = MetaData() self.table_prefix = table_prefix self.metadata = metadata self.create_tables() if self.options.get('autocreate_tables', True): self.metadata.create_all(bind=self.engine) def create_tables(self): 
"""Creates the tables required for the handler on the class and metadata. """ import sqlalchemy as db def table(name, *args, **kwargs): return db.Table(self.table_prefix + name, self.metadata, *args, **kwargs) self.tickets = table('tickets', db.Column('ticket_id', db.Integer, primary_key=True), db.Column('record_hash', db.String(40), unique=True), db.Column('level', db.Integer), db.Column('channel', db.String(120)), db.Column('location', db.String(512)), db.Column('module', db.String(256)), db.Column('last_occurrence_time', db.DateTime), db.Column('occurrence_count', db.Integer), db.Column('solved', db.Boolean), db.Column('app_id', db.String(80))) self.occurrences = table('occurrences', db.Column('occurrence_id', db.Integer, primary_key=True), db.Column('ticket_id', db.Integer, db.ForeignKey(self.table_prefix + 'tickets.ticket_id')), db.Column('time', db.DateTime), db.Column('data', db.Text), db.Column('app_id', db.String(80))) def _order(self, q, table, order_by): if order_by[0] == '-': return q.order_by(table.c[order_by[1:]].desc()) return q.order_by(table.c[order_by]) def record_ticket(self, record, data, hash, app_id): """Records a log record as ticket.""" # Can use the session instead engine.connection and transaction s = self.session try: q = self.tickets.select(self.tickets.c.record_hash == hash) row = s.execute(q).fetchone() if row is None: row = s.execute(self.tickets.insert().values( record_hash=hash, level=record.level, channel=record.channel or u(''), location=u('%s:%d') % (record.filename, record.lineno), module=record.module or u(''), occurrence_count=0, solved=False, app_id=app_id )) ticket_id = row.inserted_primary_key[0] else: ticket_id = row['ticket_id'] s.execute(self.occurrences.insert() .values(ticket_id=ticket_id, time=record.time, app_id=app_id, data=json.dumps(data))) s.execute( self.tickets.update() .where(self.tickets.c.ticket_id == ticket_id) .values(occurrence_count=self.tickets.c.occurrence_count + 1, last_occurrence_time=record.time, 
solved=False)) s.commit() except Exception: s.rollback() raise # Closes the session and removes it from the pool s.remove() def count_tickets(self): """Returns the number of tickets.""" return self.engine.execute(self.tickets.count()).fetchone()[0] def get_tickets(self, order_by='-last_occurrence_time', limit=50, offset=0): """Selects tickets from the database.""" return [Ticket(self, row) for row in self.engine.execute( self._order(self.tickets.select(), self.tickets, order_by) .limit(limit).offset(offset)).fetchall()] def solve_ticket(self, ticket_id): """Marks a ticket as solved.""" self.engine.execute(self.tickets.update() .where(self.tickets.c.ticket_id == ticket_id) .values(solved=True)) def delete_ticket(self, ticket_id): """Deletes a ticket from the database.""" self.engine.execute(self.occurrences.delete() .where(self.occurrences.c.ticket_id == ticket_id)) self.engine.execute(self.tickets.delete() .where(self.tickets.c.ticket_id == ticket_id)) def get_ticket(self, ticket_id): """Return a single ticket with all occurrences.""" row = self.engine.execute(self.tickets.select().where( self.tickets.c.ticket_id == ticket_id)).fetchone() if row is not None: return Ticket(self, row) def get_occurrences(self, ticket, order_by='-time', limit=50, offset=0): """Selects occurrences from the database for a ticket.""" return [Occurrence(self, row) for row in self.engine.execute(self._order( self.occurrences.select() .where(self.occurrences.c.ticket_id == ticket), self.occurrences, order_by) .limit(limit).offset(offset)).fetchall()] class MongoDBBackend(BackendBase): """Implements a backend that writes into a MongoDB database.""" class _FixedTicketClass(Ticket): @property def ticket_id(self): return self._id class _FixedOccurrenceClass(Occurrence): def __init__(self, db, row): self.update_from_dict(json.loads(row['data'])) self.db = db self.time = row['time'] self.ticket_id = row['ticket_id'] self.occurrence_id = row['_id'] # TODO: Update connection setup once PYTHON-160 
is solved. def setup_backend(self): from pymongo import ASCENDING, DESCENDING from pymongo.connection import Connection try: from pymongo.uri_parser import parse_uri except ImportError: from pymongo.connection import _parse_uri as parse_uri from pymongo.errors import AutoReconnect _connection = None uri = self.options.pop('uri', u('')) _connection_attempts = 0 parsed_uri = parse_uri(uri, Connection.PORT) if type(parsed_uri) is tuple: # pymongo < 2.0 database = parsed_uri[1] else: # pymongo >= 2.0 database = parsed_uri['database'] # Handle auto reconnect signals properly while _connection_attempts < 5: try: if _connection is None: _connection = Connection(uri) database = _connection[database] break except AutoReconnect: _connection_attempts += 1 time.sleep(0.1) self.database = database # setup correct indexes database.tickets.ensure_index([('record_hash', ASCENDING)], unique=True) database.tickets.ensure_index([('solved', ASCENDING), ('level', ASCENDING)]) database.occurrences.ensure_index([('time', DESCENDING)]) def _order(self, q, order_by): from pymongo import ASCENDING, DESCENDING col = '%s' % (order_by[0] == '-' and order_by[1:] or order_by) if order_by[0] == '-': return q.sort(col, DESCENDING) return q.sort(col, ASCENDING) def _oid(self, ticket_id): from pymongo.objectid import ObjectId return ObjectId(ticket_id) def record_ticket(self, record, data, hash, app_id): """Records a log record as ticket.""" db = self.database ticket = db.tickets.find_one({'record_hash': hash}) if not ticket: doc = { 'record_hash': hash, 'level': record.level, 'channel': record.channel or u(''), 'location': u('%s:%d') % (record.filename, record.lineno), 'module': record.module or u(''), 'occurrence_count': 0, 'solved': False, 'app_id': app_id, } ticket_id = db.tickets.insert(doc) else: ticket_id = ticket['_id'] db.tickets.update({'_id': ticket_id}, { '$inc': { 'occurrence_count': 1 }, '$set': { 'last_occurrence_time': record.time, 'solved': False } }) # We store occurrences in a 
seperate collection so that # we can make it a capped collection optionally. db.occurrences.insert({ 'ticket_id': self._oid(ticket_id), 'app_id': app_id, 'time': record.time, 'data': json.dumps(data), }) def count_tickets(self): """Returns the number of tickets.""" return self.database.tickets.count() def get_tickets(self, order_by='-last_occurrence_time', limit=50, offset=0): """Selects tickets from the database.""" query = (self._order(self.database.tickets.find(), order_by) .limit(limit).skip(offset)) return [self._FixedTicketClass(self, obj) for obj in query] def solve_ticket(self, ticket_id): """Marks a ticket as solved.""" self.database.tickets.update({'_id': self._oid(ticket_id)}, {'solved': True}) def delete_ticket(self, ticket_id): """Deletes a ticket from the database.""" self.database.occurrences.remove({'ticket_id': self._oid(ticket_id)}) self.database.tickets.remove({'_id': self._oid(ticket_id)}) def get_ticket(self, ticket_id): """Return a single ticket with all occurrences.""" ticket = self.database.tickets.find_one({'_id': self._oid(ticket_id)}) if ticket: return Ticket(self, ticket) def get_occurrences(self, ticket, order_by='-time', limit=50, offset=0): """Selects occurrences from the database for a ticket.""" collection = self.database.occurrences occurrences = self._order(collection.find( {'ticket_id': self._oid(ticket)} ), order_by).limit(limit).skip(offset) return [self._FixedOccurrenceClass(self, obj) for obj in occurrences] class TicketingBaseHandler(Handler, HashingHandlerMixin): """Baseclass for ticketing handlers. This can be used to interface ticketing systems that do not necessarily provide an interface that would be compatible with the :class:`BackendBase` interface. 
""" def __init__(self, hash_salt, level=NOTSET, filter=None, bubble=False): Handler.__init__(self, level, filter, bubble) self.hash_salt = hash_salt def hash_record_raw(self, record): """Returns the unique hash of a record.""" hash = HashingHandlerMixin.hash_record_raw(self, record) if self.hash_salt is not None: hash_salt = self.hash_salt if not PY2 or isinstance(hash_salt, unicode): hash_salt = hash_salt.encode('utf-8') hash.update(b('\x00') + hash_salt) return hash class TicketingHandler(TicketingBaseHandler): """A handler that writes log records into a remote database. This database can be connected to from different dispatchers which makes this a nice setup for web applications:: from logbook.ticketing import TicketingHandler handler = TicketingHandler('sqlite:////tmp/myapp-logs.db') :param uri: a backend specific string or object to decide where to log to. :param app_id: a string with an optional ID for an application. Can be used to keep multiple application setups apart when logging into the same database. :param hash_salt: an optional salt (binary string) for the hashes. :param backend: A backend class that implements the proper database handling. Backends available are: :class:`SQLAlchemyBackend`, :class:`MongoDBBackend`. """ #: The default backend that is being used when no backend is specified. #: Unless overriden by a subclass this will be the #: :class:`SQLAlchemyBackend`. 
default_backend = SQLAlchemyBackend def __init__(self, uri, app_id='generic', level=NOTSET, filter=None, bubble=False, hash_salt=None, backend=None, **db_options): if hash_salt is None: hash_salt = u('apphash-') + app_id TicketingBaseHandler.__init__(self, hash_salt, level, filter, bubble) if backend is None: backend = self.default_backend db_options['uri'] = uri self.set_backend(backend, **db_options) self.app_id = app_id def set_backend(self, cls, **options): self.db = cls(**options) def process_record(self, record, hash): """Subclasses can override this to tamper with the data dict that is sent to the database as JSON. """ return record.to_dict(json_safe=True) def record_ticket(self, record, data, hash): """Record either a new ticket or a new occurrence for a ticket based on the hash. """ self.db.record_ticket(record, data, hash, self.app_id) def emit(self, record): """Emits a single record and writes it to the database.""" hash = self.hash_record(record).encode('utf-8') data = self.process_record(record, hash) self.record_ticket(record, data, hash) logbook-1.5.3/logbook/utils.py000066400000000000000000000132271355165376200163440ustar00rootroot00000000000000from contextlib import contextmanager import functools import sys import threading from .base import Logger, DEBUG from .helpers import string_types class _SlowContextNotifier(object): def __init__(self, threshold, func): self.timer = threading.Timer(threshold, func) def __enter__(self): self.timer.start() return self def __exit__(self, *_): self.timer.cancel() _slow_logger = Logger('Slow') def logged_if_slow(*args, **kwargs): """Context manager that logs if operations within take longer than `threshold` seconds. :param threshold: Number of seconds (or fractions thereof) allwoed before logging occurs. The default is 1 second. :param logger: :class:`~logbook.Logger` to use. The default is a 'slow' logger. :param level: Log level. The default is `DEBUG`. :param func: (Deprecated). 
Function to call to perform logging. The remaining parameters are passed to the :meth:`~logbook.base.LoggerMixin.log` method. """ threshold = kwargs.pop('threshold', 1) func = kwargs.pop('func', None) if func is None: logger = kwargs.pop('logger', _slow_logger) level = kwargs.pop('level', DEBUG) func = functools.partial(logger.log, level, *args, **kwargs) else: if 'logger' in kwargs or 'level' in kwargs: raise TypeError("If using deprecated func parameter, 'logger' and" " 'level' arguments cannot be passed.") func = functools.partial(func, *args, **kwargs) return _SlowContextNotifier(threshold, func) class _Local(threading.local): enabled = True _local = _Local() @contextmanager def suppressed_deprecations(): """Disables deprecation messages temporarily >>> with suppressed_deprecations(): ... call_some_deprecated_logic() .. versionadded:: 0.12 """ prev_enabled = _local.enabled _local.enabled = False try: yield finally: _local.enabled = prev_enabled _deprecation_logger = Logger("deprecation") _deprecation_locations = set() def forget_deprecation_locations(): _deprecation_locations.clear() def _write_deprecations_if_needed(message, frame_correction): if not _local.enabled: return caller_location = _get_caller_location(frame_correction=frame_correction+1) if caller_location not in _deprecation_locations: _deprecation_logger.warning(message, frame_correction=frame_correction+1) _deprecation_locations.add(caller_location) def log_deprecation_message(message, frame_correction=0): _write_deprecations_if_needed("Deprecation message: {0}".format(message), frame_correction=frame_correction+1) class _DeprecatedFunction(object): def __init__(self, func, message, obj=None, objtype=None): super(_DeprecatedFunction, self).__init__() self._func = func self._message = message self._obj = obj self._objtype = objtype def _get_underlying_func(self): returned = self._func if isinstance(returned, classmethod): if hasattr(returned, '__func__'): returned = returned.__func__ else: returned 
= returned.__get__(self._objtype).__func__ return returned def __call__(self, *args, **kwargs): func = self._get_underlying_func() warning = "{0} is deprecated.".format(self._get_func_str()) if self._message is not None: warning += " {0}".format(self._message) _write_deprecations_if_needed(warning, frame_correction=+1) if self._obj is not None: return func(self._obj, *args, **kwargs) elif self._objtype is not None: return func(self._objtype, *args, **kwargs) return func(*args, **kwargs) def _get_func_str(self): func = self._get_underlying_func() if self._objtype is not None: return '{0}.{1}'.format(self._objtype.__name__, func.__name__) return '{0}.{1}'.format(func.__module__, func.__name__) def __get__(self, obj, objtype): return self.bound_to(obj, objtype) def bound_to(self, obj, objtype): return _DeprecatedFunction(self._func, self._message, obj=obj, objtype=objtype) @property def __name__(self): return self._get_underlying_func().__name__ @property def __doc__(self): returned = self._get_underlying_func().__doc__ if returned: # pylint: disable=no-member returned += "\n.. deprecated\n" # pylint: disable=no-member if self._message: returned += " {0}".format( self._message) # pylint: disable=no-member return returned @__doc__.setter def __doc__(self, doc): self._get_underlying_func().__doc__ = doc def deprecated(func=None, message=None): """Marks the specified function as deprecated, and emits a warning when it's called. >>> @deprecated(message='No longer supported') ... def deprecated_func(): ... pass This will cause a warning log to be emitted when the function gets called, with the correct filename/lineno. .. 
versionadded:: 0.12 """ if isinstance(func, string_types): assert message is None message = func func = None if func is None: return functools.partial(deprecated, message=message) return _DeprecatedFunction(func, message) def _get_caller_location(frame_correction): frame = sys._getframe(frame_correction + 1) # pylint: disable=protected-access try: return (frame.f_code.co_name, frame.f_lineno) finally: del frame logbook-1.5.3/scripts/000077500000000000000000000000001355165376200146605ustar00rootroot00000000000000logbook-1.5.3/scripts/make-release.py000066400000000000000000000076361355165376200176010ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- """ make-release ~~~~~~~~~~~~ Helper script that performs a release. Does pretty much everything automatically for us. :copyright: (c) 2011 by Armin Ronacher. :license: BSD, see LICENSE for more details. """ import sys import os import re import argparse from datetime import datetime, date from subprocess import Popen, PIPE _date_clean_re = re.compile(r'(\d+)(st|nd|rd|th)') def parse_changelog(): with open('CHANGES') as f: lineiter = iter(f) for line in lineiter: match = re.search('^Version\s+(.*)', line.strip()) if match is None: continue version = match.group(1).strip() if lineiter.next().count('-') != len(match.group(0)): continue while 1: change_info = lineiter.next().strip() if change_info: break match = re.search(r'released on (\w+\s+\d+\w+\s+\d+)' r'(?:, codename (.*))?(?i)', change_info) if match is None: continue datestr, codename = match.groups() return version, parse_date(datestr), codename def bump_version(version): try: parts = map(int, version.split('.')) except ValueError: fail('Current version is not numeric') parts[-1] += 1 return '.'.join(map(str, parts)) def parse_date(string): string = _date_clean_re.sub(r'\1', string) return datetime.strptime(string, '%B %d %Y') def set_filename_version(filename, version_number, pattern): changed = [] def inject_version(match): before, old, 
after = match.groups() changed.append(True) return before + version_number + after with open(filename) as f: contents = re.sub(r"^(\s*%s\s*=\s*')(.+?)(')(?sm)" % pattern, inject_version, f.read()) if not changed: fail('Could not find %s in %s', pattern, filename) with open(filename, 'w') as f: f.write(contents) def set_version(version): info('Setting version to %s', version) with open('logbook/__version__.py', 'w') as f: f.write('__version__ = {!r}'.format(version)) def fail(message, *args): print >> sys.stderr, 'Error:', message % args sys.exit(1) def info(message, *args): print >> sys.stderr, message % args def get_git_tags(): return set(Popen(['git', 'tag'], stdout=PIPE).communicate()[0].splitlines()) def git_is_clean(): return Popen(['git', 'diff', '--quiet']).wait() == 0 def make_git_commit(message, *args): message = message % args Popen(['git', 'commit', '-am', message]).wait() def make_git_tag(tag): info('Tagging "%s"', tag) Popen(['git', 'tag', tag]).wait() parser = argparse.ArgumentParser("%prog [options]") parser.add_argument("--no-upload", dest="upload", action="store_false", default=True) def main(): args = parser.parse_args() os.chdir(os.path.join(os.path.dirname(__file__), '..')) rv = parse_changelog() if rv is None: fail('Could not parse changelog') version, release_date, codename = rv dev_version = bump_version(version) + '-dev' info('Releasing %s (codename %s, release date %s)', version, codename, release_date.strftime('%d/%m/%Y')) tags = get_git_tags() if version in tags: fail('Version "%s" is already tagged', version) if release_date.date() != date.today(): fail('Release date is not today (%s != %s)' % (release_date.date(), date.today())) if not git_is_clean(): fail('You have uncommitted changes in git') set_version(version) make_git_commit('Bump version number to %s', version) make_git_tag(version) set_version(dev_version) make_git_commit('Bump version number to %s', dev_version) if __name__ == '__main__': main() 
logbook-1.5.3/scripts/test_setup.py000066400000000000000000000005541355165376200174350ustar00rootroot00000000000000#! /usr/bin/python from pip._internal import main as pip_main import sys if __name__ == '__main__': python_version = sys.version_info deps = [ "execnet>=1.0.9", "pytest", "pyzmq", "sqlalchemy", "Jinja2", ] print("Setting up dependencies...") result = pip_main(["install"] + deps) sys.exit(result) logbook-1.5.3/scripts/travis_build.py000066400000000000000000000007211355165376200177210ustar00rootroot00000000000000#! /usr/bin/python from __future__ import print_function import ast import os import subprocess import sys _PYPY = hasattr(sys, "pypy_version_info") if __name__ == '__main__': use_cython = ast.literal_eval(os.environ["USE_CYTHON"]) if use_cython and _PYPY: print("PyPy+Cython configuration skipped") else: sys.exit( subprocess.call( "make cybuild test" if use_cython else "make test", shell=True) ) logbook-1.5.3/setup.cfg000066400000000000000000000001651355165376200150140ustar00rootroot00000000000000[build_sphinx] source-dir = docs/ build-dir = docs/_build all_files = 1 [upload_docs] upload-dir = docs/_build/html logbook-1.5.3/setup.py000066400000000000000000000160531355165376200147100ustar00rootroot00000000000000r""" Logbook ------- An awesome logging implementation that is fun to use. 
Quickstart `````````` :: from logbook import Logger log = Logger('A Fancy Name') log.warn('Logbook is too awesome for most applications') log.error("Can't touch this") Works for web apps too `````````````````````` :: from logbook import MailHandler, Processor mailhandler = MailHandler(from_addr='servererror@example.com', recipients=['admin@example.com'], level='ERROR', format_string=u'''\ Subject: Application Error for {record.extra[path]} [{record.extra[method]}] Message type: {record.level_name} Location: {record.filename}:{record.lineno} Module: {record.module} Function: {record.func_name} Time: {record.time:%Y-%m-%d %H:%M:%S} Remote IP: {record.extra[ip]} Request: {record.extra[path]} [{record.extra[method]}] Message: {record.message} ''') def handle_request(request): def inject_extra(record, handler): record.extra['ip'] = request.remote_addr record.extra['method'] = request.method record.extra['path'] = request.path with Processor(inject_extra): with mailhandler: # execute code that might fail in the context of the # request. 
""" import os import platform import sys from itertools import chain from distutils.command.build_ext import build_ext from distutils.errors import ( CCompilerError, DistutilsExecError, DistutilsPlatformError) from setuptools import Distribution as _Distribution, Extension, setup from setuptools.command.test import test as TestCommand cmdclass = {} if sys.version_info < (2, 6): raise Exception('Logbook requires Python 2.6 or higher.') cpython = platform.python_implementation() == 'CPython' ext_modules = [Extension('logbook._speedups', sources=['logbook/_speedups.c'])] ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError) if sys.platform == 'win32': # 2.6's distutils.msvc9compiler can raise an IOError when failing to # find the compiler ext_errors += (IOError,) class BuildFailed(Exception): def __init__(self): self.cause = sys.exc_info()[1] # work around py 2/3 different syntax class ve_build_ext(build_ext): """This class allows C extension building to fail.""" def run(self): try: build_ext.run(self) except DistutilsPlatformError: raise BuildFailed() def build_extension(self, ext): try: build_ext.build_extension(self, ext) except ext_errors: raise BuildFailed() except ValueError: # this can happen on Windows 64 bit, see Python issue 7511 if "'path'" in str(sys.exc_info()[1]): # works with both py 2/3 raise BuildFailed() raise cmdclass['build_ext'] = ve_build_ext class Distribution(_Distribution): def has_ext_modules(self): # We want to always claim that we have ext_modules. This will be fine # if we don't actually have them (such as on PyPy) because nothing # will get built, however we don't want to provide an overally broad # Wheel package when building a wheel without C support. This will # ensure that Wheel knows to treat us as if the build output is # platform specific. 
return True class PyTest(TestCommand): # from https://pytest.org/latest/goodpractises.html\ # #integration-with-setuptools-test-commands user_options = [('pytest-args=', 'a', 'Arguments to pass to py.test')] default_options = ['tests'] def initialize_options(self): TestCommand.initialize_options(self) self.pytest_args = '' def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self): # import here, cause outside the eggs aren't loaded import pytest errno = pytest.main( ' '.join(self.default_options) + ' ' + self.pytest_args) sys.exit(errno) cmdclass['test'] = PyTest def status_msgs(*msgs): print('*' * 75) for msg in msgs: print(msg) print('*' * 75) version_file_path = os.path.join( os.path.dirname(__file__), 'logbook', '__version__.py') with open(version_file_path) as version_file: exec(version_file.read()) # pylint: disable=W0122 extras_require = dict() if sys.version_info[:2] < (3, 0): extras_require['test'] = set(['pytest', 'pytest-cov<2.6']) else: extras_require['test'] = set(['pytest>4.0', 'pytest-cov>=2.6']) if sys.version_info[:2] < (3, 3): extras_require['test'] |= set(['mock']) extras_require['dev'] = set(['cython']) | extras_require['test'] extras_require['execnet'] = set(['execnet>=1.0.9']) extras_require['sqlalchemy'] = set(['sqlalchemy']) extras_require['redis'] = set(['redis']) extras_require['zmq'] = set(['pyzmq']) extras_require['jinja'] = set(['Jinja2']) extras_require['compression'] = set(['brotli']) extras_require['all'] = set(chain.from_iterable(extras_require.values())) def run_setup(with_cext): kwargs = {} if with_cext: kwargs['ext_modules'] = ext_modules else: kwargs['ext_modules'] = [] setup( name='Logbook', version=__version__, license='BSD', url='http://logbook.pocoo.org/', author='Armin Ronacher, Georg Brandl', author_email='armin.ronacher@active-4.com', description='A logging replacement for Python', long_description=__doc__, packages=['logbook'], zip_safe=False, 
platforms='any', cmdclass=cmdclass, tests_require=['pytest'], classifiers=[ 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', ], extras_require=extras_require, distclass=Distribution, **kwargs ) if not cpython: run_setup(False) status_msgs( 'WARNING: C extensions are not supported on ' + 'this Python platform, speedups are not enabled.', 'Plain-Python build succeeded.' ) elif os.environ.get('DISABLE_LOGBOOK_CEXT'): run_setup(False) status_msgs( 'DISABLE_LOGBOOK_CEXT is set; ' + 'not attempting to build C extensions.', 'Plain-Python build succeeded.' ) else: try: run_setup(True) except BuildFailed as exc: status_msgs( exc.cause, 'WARNING: The C extension could not be compiled, ' + 'speedups are not enabled.', 'Failure information, if any, is above.', 'Retrying the build without the C extension now.' ) run_setup(False) status_msgs( 'WARNING: The C extension could not be compiled, ' + 'speedups are not enabled.', 'Plain-Python build succeeded.' 
) logbook-1.5.3/tests/000077500000000000000000000000001355165376200143335ustar00rootroot00000000000000logbook-1.5.3/tests/__init__.py000066400000000000000000000000001355165376200164320ustar00rootroot00000000000000logbook-1.5.3/tests/conftest.py000066400000000000000000000051611355165376200165350ustar00rootroot00000000000000import sys import logbook import pytest logbook.StderrHandler().push_application() @pytest.fixture def logger(): return logbook.Logger('testlogger') @pytest.fixture def active_handler(request, test_handler, activation_strategy): s = activation_strategy(test_handler) s.activate() @request.addfinalizer def deactivate(): s.deactivate() return test_handler @pytest.fixture def test_handler(): return logbook.TestHandler() class ActivationStrategy(object): def __init__(self, handler): super(ActivationStrategy, self).__init__() self.handler = handler def activate(self): raise NotImplementedError() # pragma: no cover def deactivate(self): raise NotImplementedError() # pragma: no cover def __enter__(self): self.activate() return self.handler def __exit__(self, *_): self.deactivate() class ContextEnteringStrategy(ActivationStrategy): def activate(self): self.handler.__enter__() def deactivate(self): self.handler.__exit__(None, None, None) class PushingStrategy(ActivationStrategy): def activate(self): from logbook.concurrency import is_gevent_enabled if is_gevent_enabled(): self.handler.push_greenlet() else: self.handler.push_thread() def deactivate(self): from logbook.concurrency import is_gevent_enabled if is_gevent_enabled(): self.handler.pop_greenlet() else: self.handler.pop_thread() @pytest.fixture(params=[ContextEnteringStrategy, PushingStrategy]) def activation_strategy(request): return request.param @pytest.fixture def logfile(tmpdir): return str(tmpdir.join('logfile.log')) @pytest.fixture def default_handler(request): returned = logbook.StderrHandler() returned.push_application() request.addfinalizer(returned.pop_application) return returned try: 
import gevent except ImportError: pass else: @pytest.fixture(scope="module", autouse=True, params=[False, True]) def gevent(request): module_name = getattr(request.module, '__name__', '') if (not any(s in module_name for s in ('queues', 'processors')) and request.param): from logbook.concurrency import enable_gevent, _disable_gevent enable_gevent() @request.addfinalizer def fin(): _disable_gevent() def pytest_ignore_collect(path, config): if 'test_asyncio.py' in path.basename and (sys.version_info.major < 3 or sys.version_info.minor < 5): return True return False logbook-1.5.3/tests/test_asyncio.py000066400000000000000000000014411355165376200174110ustar00rootroot00000000000000import pytest import logbook import asyncio from logbook.concurrency import has_contextvars ITERATIONS = 100 @pytest.mark.skipif(not has_contextvars, reason="Contexvars not available") def test_asyncio_context_management(logger): h1 = logbook.TestHandler() h2 = logbook.TestHandler() async def task(handler, msg): for _ in range(ITERATIONS): with handler.contextbound(): logger.info(msg) await asyncio.sleep(0) # allow for context switch asyncio.get_event_loop().run_until_complete(asyncio.gather(task(h1, 'task1'), task(h2, 'task2'))) assert len(h1.records) == ITERATIONS assert all(['task1' == r.msg for r in h1.records]) assert len(h2.records) == ITERATIONS assert all(['task2' == r.msg for r in h2.records]) logbook-1.5.3/tests/test_ci.py000066400000000000000000000007321355165376200163410ustar00rootroot00000000000000# -*- coding: utf-8 -*- import os import pytest from .utils import appveyor, travis @appveyor def test_appveyor_speedups(): if os.environ.get('CYBUILD'): import logbook._speedups else: with pytest.raises(ImportError): import logbook._speedups @travis def test_travis_speedups(): if os.environ.get('CYBUILD'): import logbook._speedups else: with pytest.raises(ImportError): import logbook._speedups 
logbook-1.5.3/tests/test_deadlock.py000066400000000000000000000017321355165376200175150ustar00rootroot00000000000000import sys import logbook class MyObject(object): def __init__(self, logger_func): self._logger_func = logger_func def __str__(self): self._logger_func("this debug message produced in __str__") return "" class FakeLock(object): def __init__(self): self._acquired = False self._deadlock_occurred = False def acquire(self): if self._acquired: self._deadlock_occurred = True self._acquired = True def release(self): self._acquired = False def test_deadlock_in_emit(): logbook_logger = logbook.Logger("logbook") obj = MyObject(logbook_logger.info) stream_handler = logbook.StreamHandler(stream=sys.stderr, level=logbook.DEBUG) stream_handler.lock = FakeLock() with stream_handler.applicationbound(): logbook_logger.info("format this: {}", obj) assert not stream_handler.lock._deadlock_occurred logbook-1.5.3/tests/test_file_handler.py000066400000000000000000000230761355165376200203700ustar00rootroot00000000000000import os import pytest import time from datetime import datetime import logbook from logbook.helpers import u, xrange import gzip import brotli from .utils import capturing_stderr_context, LETTERS def test_file_handler(logfile, activation_strategy, logger): handler = logbook.FileHandler( logfile, format_string='{record.level_name}:{record.channel}:{record.message}',) with activation_strategy(handler): logger.warn('warning message') handler.close() with open(logfile) as f: assert f.readline() == 'WARNING:testlogger:warning message\n' def test_file_handler_unicode(logfile, activation_strategy, logger): with capturing_stderr_context() as captured: with activation_strategy(logbook.FileHandler(logfile)): logger.info(u('\u0431')) assert (not captured.getvalue()) def test_file_handler_delay(logfile, activation_strategy, logger): handler = logbook.FileHandler( logfile, format_string='{record.level_name}:{record.channel}:{record.message}', delay=True) assert (not 
os.path.isfile(logfile)) with activation_strategy(handler): logger.warn('warning message') handler.close() with open(logfile) as f: assert f.readline() == 'WARNING:testlogger:warning message\n' def test_monitoring_file_handler(logfile, activation_strategy, logger): if os.name == 'nt': pytest.skip( 'unsupported on windows due to different IO (also unneeded)') handler = logbook.MonitoringFileHandler( logfile, format_string='{record.level_name}:{record.channel}:{record.message}', delay=True) with activation_strategy(handler): logger.warn('warning message') os.rename(logfile, logfile + '.old') logger.warn('another warning message') handler.close() with open(logfile) as f: assert f.read().strip() == 'WARNING:testlogger:another warning message' def test_custom_formatter(activation_strategy, logfile, logger): def custom_format(record, handler): return record.level_name + ':' + record.message handler = logbook.FileHandler(logfile) with activation_strategy(handler): handler.formatter = custom_format logger.warn('Custom formatters are awesome') with open(logfile) as f: assert f.readline() == 'WARNING:Custom formatters are awesome\n' def test_rotating_file_handler(logfile, activation_strategy, logger): basename = os.path.basename(logfile) handler = logbook.RotatingFileHandler(logfile, max_size=2048, backup_count=3, ) handler.format_string = '{record.message}' with activation_strategy(handler): for c, x in zip(LETTERS, xrange(32)): logger.warn(c * 256) files = [x for x in os.listdir(os.path.dirname(logfile)) if x.startswith(basename)] files.sort() assert files == [basename, basename + '.1', basename + '.2', basename + '.3'] with open(logfile) as f: assert f.readline().rstrip() == ('C' * 256) assert f.readline().rstrip() == ('D' * 256) assert f.readline().rstrip() == ('E' * 256) assert f.readline().rstrip() == ('F' * 256) @pytest.mark.parametrize("backup_count", [1, 3]) def test_timed_rotating_file_handler(tmpdir, activation_strategy, backup_count): basename = 
str(tmpdir.join('trot.log')) handler = logbook.TimedRotatingFileHandler( basename, backup_count=backup_count) handler.format_string = '[{record.time:%H:%M}] {record.message}' def fake_record(message, year, month, day, hour=0, minute=0, second=0): lr = logbook.LogRecord('Test Logger', logbook.WARNING, message) lr.time = datetime(year, month, day, hour, minute, second) return lr with activation_strategy(handler): for x in xrange(10): handler.handle(fake_record('First One', 2010, 1, 5, x + 1)) for x in xrange(20): handler.handle(fake_record('Second One', 2010, 1, 6, x + 1)) for x in xrange(10): handler.handle(fake_record('Third One', 2010, 1, 7, x + 1)) for x in xrange(20): handler.handle(fake_record('Last One', 2010, 1, 8, x + 1)) files = sorted(x for x in os.listdir(str(tmpdir)) if x.startswith('trot')) assert files == ['trot-2010-01-0{0}.log'.format(i) for i in xrange(5, 9)][-backup_count:] with open(str(tmpdir.join('trot-2010-01-08.log'))) as f: assert f.readline().rstrip() == '[01:00] Last One' assert f.readline().rstrip() == '[02:00] Last One' if backup_count > 1: with open(str(tmpdir.join('trot-2010-01-07.log'))) as f: assert f.readline().rstrip() == '[01:00] Third One' assert f.readline().rstrip() == '[02:00] Third One' @pytest.mark.parametrize("backup_count", [1, 3]) def test_timed_rotating_file_handler__rollover_format(tmpdir, activation_strategy, backup_count): basename = str(tmpdir.join('trot.log')) handler = logbook.TimedRotatingFileHandler( basename, backup_count=backup_count, rollover_format='{basename}{ext}.{timestamp}', ) handler.format_string = '[{record.time:%H:%M}] {record.message}' def fake_record(message, year, month, day, hour=0, minute=0, second=0): lr = logbook.LogRecord('Test Logger', logbook.WARNING, message) lr.time = datetime(year, month, day, hour, minute, second) return lr with activation_strategy(handler): for x in xrange(10): handler.handle(fake_record('First One', 2010, 1, 5, x + 1)) for x in xrange(20): 
handler.handle(fake_record('Second One', 2010, 1, 6, x + 1)) for x in xrange(10): handler.handle(fake_record('Third One', 2010, 1, 7, x + 1)) for x in xrange(20): handler.handle(fake_record('Last One', 2010, 1, 8, x + 1)) files = sorted(x for x in os.listdir(str(tmpdir)) if x.startswith('trot')) assert files == ['trot.log.2010-01-0{0}'.format(i) for i in xrange(5, 9)][-backup_count:] with open(str(tmpdir.join('trot.log.2010-01-08'))) as f: assert f.readline().rstrip() == '[01:00] Last One' assert f.readline().rstrip() == '[02:00] Last One' if backup_count > 1: with open(str(tmpdir.join('trot.log.2010-01-07'))) as f: assert f.readline().rstrip() == '[01:00] Third One' assert f.readline().rstrip() == '[02:00] Third One' @pytest.mark.parametrize("backup_count", [1, 3]) @pytest.mark.parametrize("preexisting_file", [True, False]) def test_timed_rotating_file_handler__not_timed_filename_for_current( tmpdir, activation_strategy, backup_count, preexisting_file ): basename = str(tmpdir.join('trot.log')) if preexisting_file: with open(basename, 'w') as file: file.write('contents') jan_first = time.mktime(datetime(2010, 1, 1).timetuple()) os.utime(basename, (jan_first, jan_first)) handler = logbook.TimedRotatingFileHandler( basename, format_string='[{record.time:%H:%M}] {record.message}', backup_count=backup_count, rollover_format='{basename}{ext}.{timestamp}', timed_filename_for_current=False, ) def fake_record(message, year, month, day, hour=0, minute=0, second=0): lr = logbook.LogRecord('Test Logger', logbook.WARNING, message) lr.time = datetime(year, month, day, hour, minute, second) return lr with activation_strategy(handler): for x in xrange(10): handler.handle(fake_record('First One', 2010, 1, 5, x + 1)) for x in xrange(20): handler.handle(fake_record('Second One', 2010, 1, 6, x + 1)) for x in xrange(10): handler.handle(fake_record('Third One', 2010, 1, 7, x + 1)) for x in xrange(20): handler.handle(fake_record('Last One', 2010, 1, 8, x + 1)) computed_files = [x for x 
in os.listdir(str(tmpdir)) if x.startswith('trot')] expected_files = ['trot.log.2010-01-01'] if preexisting_file else [] expected_files += ['trot.log.2010-01-0{0}'.format(i) for i in xrange(5, 8)] expected_files += ['trot.log'] expected_files = expected_files[-backup_count:] assert sorted(computed_files) == sorted(expected_files) with open(str(tmpdir.join('trot.log'))) as f: assert f.readline().rstrip() == '[01:00] Last One' assert f.readline().rstrip() == '[02:00] Last One' if backup_count > 1: with open(str(tmpdir.join('trot.log.2010-01-07'))) as f: assert f.readline().rstrip() == '[01:00] Third One' assert f.readline().rstrip() == '[02:00] Third One' def _decompress(input_file_name, use_gzip=True): if use_gzip: with gzip.open(input_file_name, 'rb') as in_f: return in_f.read().decode() else: with open(input_file_name, 'rb') as in_f: return brotli.decompress(in_f.read()).decode() @pytest.mark.parametrize("use_gzip", [True, False]) def test_compression_file_handler(logfile, activation_strategy, logger, use_gzip): handler = logbook.GZIPCompressionHandler(logfile) if use_gzip else logbook.BrotliCompressionHandler(logfile) handler.format_string = '{record.level_name}:{record.channel}:{record.message}' with activation_strategy(handler): logger.warn('warning message') handler.close() assert _decompress(logfile, use_gzip) == 'WARNING:testlogger:warning message\n' logbook-1.5.3/tests/test_fingers_crossed_handler.py000066400000000000000000000051111355165376200226160ustar00rootroot00000000000000import logbook from .utils import capturing_stderr_context def test_fingerscrossed(activation_strategy, logger, default_handler): handler = logbook.FingersCrossedHandler(default_handler, logbook.WARNING) # if no warning occurs, the infos are not logged with activation_strategy(handler): with capturing_stderr_context() as captured: logger.info('some info') assert captured.getvalue() == '' assert (not handler.triggered) # but if it does, all log messages are output with 
activation_strategy(handler): with capturing_stderr_context() as captured: logger.info('some info') logger.warning('something happened') logger.info('something else happened') logs = captured.getvalue() assert 'some info' in logs assert 'something happened' in logs assert 'something else happened' in logs assert handler.triggered def test_fingerscrossed_factory(activation_strategy, logger): handlers = [] def handler_factory(record, fch): handler = logbook.TestHandler() handlers.append(handler) return handler def make_fch(): return logbook.FingersCrossedHandler(handler_factory, logbook.WARNING) fch = make_fch() with activation_strategy(fch): logger.info('some info') assert len(handlers) == 0 logger.warning('a warning') assert len(handlers) == 1 logger.error('an error') assert len(handlers) == 1 assert handlers[0].has_infos assert handlers[0].has_warnings assert handlers[0].has_errors assert (not handlers[0].has_notices) assert (not handlers[0].has_criticals) assert (not handlers[0].has_debugs) fch = make_fch() with activation_strategy(fch): logger.info('some info') logger.warning('a warning') assert len(handlers) == 2 def test_fingerscrossed_buffer_size(activation_strategy): logger = logbook.Logger('Test') test_handler = logbook.TestHandler() handler = logbook.FingersCrossedHandler(test_handler, buffer_size=3) with activation_strategy(handler): logger.info('Never gonna give you up') logger.warn('Aha!') logger.warn('Moar!') logger.error('Pure hate!') assert test_handler.formatted_records == ['[WARNING] Test: Aha!', '[WARNING] Test: Moar!', '[ERROR] Test: Pure hate!'] logbook-1.5.3/tests/test_flags.py000066400000000000000000000020611355165376200170370ustar00rootroot00000000000000import logbook import pytest from .utils import capturing_stderr_context def test_error_flag(logger): with capturing_stderr_context() as captured: with logbook.Flags(errors='print'): with logbook.Flags(errors='silent'): logger.warn('Foo {42}', 'aha') assert captured.getvalue() == '' with 
logbook.Flags(errors='silent'): with logbook.Flags(errors='print'): logger.warn('Foo {42}', 'aha') assert captured.getvalue() != '' with pytest.raises(Exception) as caught: with logbook.Flags(errors='raise'): logger.warn('Foo {42}', 'aha') assert 'Could not format message with provided arguments' in str( caught.value) def test_disable_introspection(logger): with logbook.Flags(introspection=False): with logbook.TestHandler() as h: logger.warn('Testing') assert h.records[0].frame is None assert h.records[0].calling_frame is None assert h.records[0].module is None logbook-1.5.3/tests/test_groups.py000066400000000000000000000043551355165376200172720ustar00rootroot00000000000000import logbook def test_groups(logger): def inject_extra(record): record.extra['foo'] = 'bar' group = logbook.LoggerGroup(processor=inject_extra) group.level = logbook.ERROR group.add_logger(logger) with logbook.TestHandler() as handler: logger.warn('A warning') logger.error('An error') assert (not handler.has_warning('A warning')) assert handler.has_error('An error') assert handler.records[0].extra['foo'] == 'bar' def test_group_disabled(): group = logbook.LoggerGroup() logger1 = logbook.Logger('testlogger1') logger2 = logbook.Logger('testlogger2') group.add_logger(logger1) group.add_logger(logger2) # Test group disable group.disable() with logbook.TestHandler() as handler: logger1.warn('Warning 1') logger2.warn('Warning 2') assert not handler.has_warnings # Test group enable group.enable() with logbook.TestHandler() as handler: logger1.warn('Warning 1') logger2.warn('Warning 2') assert handler.has_warning('Warning 1') assert handler.has_warning('Warning 2') # Test group disabled, but logger explicitly enabled group.disable() logger1.enable() with logbook.TestHandler() as handler: logger1.warn('Warning 1') logger2.warn('Warning 2') assert handler.has_warning('Warning 1') assert not handler.has_warning('Warning 2') # Logger 1 will be enabled by using force=True group.disable(force=True) with 
logbook.TestHandler() as handler: logger1.warn('Warning 1') logger2.warn('Warning 2') assert not handler.has_warning('Warning 1') assert not handler.has_warning('Warning 2') # Enabling without force means logger 1 will still be disabled. group.enable() with logbook.TestHandler() as handler: logger1.warn('Warning 1') logger2.warn('Warning 2') assert not handler.has_warning('Warning 1') assert handler.has_warning('Warning 2') # Force logger 1 enabled. group.enable(force=True) with logbook.TestHandler() as handler: logger1.warn('Warning 1') logger2.warn('Warning 2') assert handler.has_warning('Warning 1') assert handler.has_warning('Warning 2') logbook-1.5.3/tests/test_handler_errors.py000066400000000000000000000030071355165376200207550ustar00rootroot00000000000000import re import sys import logbook import pytest from .utils import capturing_stderr_context __file_without_pyc__ = __file__ if __file_without_pyc__.endswith('.pyc'): __file_without_pyc__ = __file_without_pyc__[:-1] def test_handler_exception(activation_strategy, logger): class ErroringHandler(logbook.TestHandler): def emit(self, record): raise RuntimeError('something bad happened') with capturing_stderr_context() as stderr: with activation_strategy(ErroringHandler()): logger.warn('I warn you.') assert 'something bad happened' in stderr.getvalue() assert 'I warn you' not in stderr.getvalue() def test_formatting_exception(): def make_record(): return logbook.LogRecord('Test Logger', logbook.WARNING, 'Hello {foo:invalid}', kwargs={'foo': 42}, frame=sys._getframe()) record = make_record() with pytest.raises(TypeError) as caught: record.message errormsg = str(caught.value) assert re.search( 'Could not format message with provided arguments: Invalid ' '(?:format specifier)|(?:conversion specification)|(?:format spec)', errormsg, re.M | re.S) assert "msg='Hello {foo:invalid}'" in errormsg assert 'args=()' in errormsg assert "kwargs={'foo': 42}" in errormsg assert re.search( r'Happened in file .*%s, line \d+' % 
re.escape(__file_without_pyc__), errormsg, re.M | re.S) logbook-1.5.3/tests/test_handlers.py000066400000000000000000000110241355165376200175420ustar00rootroot00000000000000import logbook from .utils import capturing_stderr_context, make_fake_mail_handler def test_custom_logger(activation_strategy, logger): client_ip = '127.0.0.1' class CustomLogger(logbook.Logger): def process_record(self, record): record.extra['ip'] = client_ip custom_log = CustomLogger('awesome logger') fmt = ('[{record.level_name}] {record.channel}: ' '{record.message} [{record.extra[ip]}]') handler = logbook.TestHandler(format_string=fmt) assert handler.format_string == fmt with activation_strategy(handler): custom_log.warn('Too many sounds') logger.warn('"Music" playing') assert handler.formatted_records == [ '[WARNING] awesome logger: Too many sounds [127.0.0.1]', '[WARNING] testlogger: "Music" playing []'] def test_custom_handling(activation_strategy, logger): class MyTestHandler(logbook.TestHandler): def handle(self, record): if record.extra.get('flag') != 'testing': return False return logbook.TestHandler.handle(self, record) # Check metaclass (== cls.__class__) assert logbook.Handler.__class__ == logbook.handlers._HandlerType class MyLogger(logbook.Logger): def process_record(self, record): logbook.Logger.process_record(self, record) record.extra['flag'] = 'testing' log = MyLogger() handler = MyTestHandler() with capturing_stderr_context() as captured: with activation_strategy(handler): log.warn('From my logger') logger.warn('From another logger') assert handler.has_warning('From my logger') assert 'From another logger' in captured.getvalue() def test_nested_setups(activation_strategy): with capturing_stderr_context() as captured: logger = logbook.Logger('App') test_handler = logbook.TestHandler(level='WARNING') mail_handler = make_fake_mail_handler(bubble=True) handlers = logbook.NestedSetup([ logbook.NullHandler(), test_handler, mail_handler ]) with activation_strategy(handlers): 
logger.warn('This is a warning') logger.error('This is also a mail') try: 1 / 0 except Exception: logger.exception() logger.warn('And here we go straight back to stderr') assert test_handler.has_warning('This is a warning') assert test_handler.has_error('This is also a mail') assert len(mail_handler.mails) == 2 assert 'This is also a mail' in mail_handler.mails[0][2] assert '1 / 0' in mail_handler.mails[1][2] assert 'And here we go straight back to stderr' in captured.getvalue() with activation_strategy(handlers): logger.warn('threadbound warning') handlers.push_application() try: logger.warn('applicationbound warning') finally: handlers.pop_application() def test_filtering(activation_strategy): logger1 = logbook.Logger('Logger1') logger2 = logbook.Logger('Logger2') handler = logbook.TestHandler() outer_handler = logbook.TestHandler() def only_1(record, handler): return record.dispatcher is logger1 handler.filter = only_1 with activation_strategy(outer_handler): with activation_strategy(handler): logger1.warn('foo') logger2.warn('bar') assert handler.has_warning('foo', channel='Logger1') assert (not handler.has_warning('bar', channel='Logger2')) assert (not outer_handler.has_warning('foo', channel='Logger1')) assert outer_handler.has_warning('bar', channel='Logger2') def test_different_context_pushing(activation_strategy): h1 = logbook.TestHandler(level=logbook.DEBUG) h2 = logbook.TestHandler(level=logbook.INFO) h3 = logbook.TestHandler(level=logbook.WARNING) logger = logbook.Logger('Testing') with activation_strategy(h1): with activation_strategy(h2): with activation_strategy(h3): logger.warn('Wuuu') logger.info('still awesome') logger.debug('puzzled') assert h1.has_debug('puzzled') assert h2.has_info('still awesome') assert h3.has_warning('Wuuu') for handler in h1, h2, h3: assert len(handler.records) == 1 def test_default_handlers(logger): with capturing_stderr_context() as stream: logger.warn('Aha!') captured = stream.getvalue() assert 'WARNING: testlogger: 
Aha!' in captured logbook-1.5.3/tests/test_helpers.py000066400000000000000000000021621355165376200174070ustar00rootroot00000000000000# -*- coding: utf-8 -*- from logbook.helpers import u from datetime import datetime import pytest def test_jsonhelper(): from logbook.helpers import to_safe_json class Bogus(object): def __str__(self): return 'bogus' rv = to_safe_json([ None, 'foo', u('jäger'), 1, datetime(2000, 1, 1), {'jäger1': 1, u('jäger2'): 2, Bogus(): 3, 'invalid': object()}, object() # invalid ]) assert rv == [None, u('foo'), u('jäger'), 1, '2000-01-01T00:00:00Z', {u('jäger1'): 1, u('jäger2'): 2, u('bogus'): 3, u('invalid'): None}, None] def test_datehelpers(): from logbook.helpers import format_iso8601, parse_iso8601 now = datetime.now() rv = format_iso8601() assert rv[:4] == str(now.year) with pytest.raises(ValueError): parse_iso8601('foo') v = parse_iso8601('2000-01-01T00:00:00.12Z') assert v.microsecond == 120000 v = parse_iso8601('2000-01-01T12:00:00+01:00') assert v.hour == 11 v = parse_iso8601('2000-01-01T12:00:00-01:00') assert v.hour == 13 logbook-1.5.3/tests/test_log_record.py000066400000000000000000000024001355165376200200570ustar00rootroot00000000000000import sys import logbook from .utils import capturing_stderr_context def test_exc_info_when_no_exceptions_exist(logger): with capturing_stderr_context() as captured: with logbook.StreamHandler(sys.stderr): logger.debug('message', exc_info=True) assert 'Traceback' not in captured.getvalue() def test_exc_info_false(): with logbook.handlers.TestHandler() as handler: logbook.debug('message here', exc_info=False) [record] = handler.records assert not record.formatted_exception def test_extradict(active_handler, logger): logger.warn('Test warning') record = active_handler.records[0] record.extra['existing'] = 'foo' assert record.extra['nonexisting'] == '' assert record.extra['existing'] == 'foo' def test_calling_frame(active_handler, logger): logger.warn('test') assert 
active_handler.records[0].calling_frame == sys._getframe() def test_frame_correction(active_handler, logger): def inner(): logger.warn('test', frame_correction=+1) inner() assert active_handler.records[0].calling_frame == sys._getframe() def test_dispatcher(active_handler, logger): logger.warn('Logbook is too awesome for stdlib') assert active_handler.records[0].dispatcher == logger logbook-1.5.3/tests/test_logbook.py000066400000000000000000000021461355165376200174030ustar00rootroot00000000000000import logbook import pytest def test_global_functions(activation_strategy): with activation_strategy(logbook.TestHandler()) as handler: logbook.debug('a debug message') logbook.info('an info message') logbook.warn('warning part 1') logbook.warning('warning part 2') logbook.notice('notice') logbook.error('an error') logbook.critical('pretty critical') logbook.log(logbook.CRITICAL, 'critical too') assert handler.has_debug('a debug message') assert handler.has_info('an info message') assert handler.has_warning('warning part 1') assert handler.has_warning('warning part 2') assert handler.has_notice('notice') assert handler.has_error('an error') assert handler.has_critical('pretty critical') assert handler.has_critical('critical too') assert handler.records[0].channel == 'Generic' assert handler.records[0].dispatcher is None def test_level_lookup_failures(): with pytest.raises(LookupError): logbook.get_level_name(37) with pytest.raises(LookupError): logbook.lookup_level('FOO') logbook-1.5.3/tests/test_logger.py000066400000000000000000000022351355165376200172250ustar00rootroot00000000000000import logbook import pytest def test_level_properties(logger): assert logger.level == logbook.NOTSET assert logger.level_name == 'NOTSET' logger.level_name = 'WARNING' assert logger.level == logbook.WARNING logger.level = logbook.ERROR assert logger.level_name == 'ERROR' def test_reflected_properties(logger): group = logbook.LoggerGroup() group.add_logger(logger) assert logger.group == group 
group.level = logbook.ERROR assert logger.level == logbook.ERROR assert logger.level_name == 'ERROR' group.level = logbook.WARNING assert logger.level == logbook.WARNING assert logger.level_name == 'WARNING' logger.level = logbook.CRITICAL group.level = logbook.DEBUG assert logger.level == logbook.CRITICAL assert logger.level_name == 'CRITICAL' group.remove_logger(logger) assert logger.group is None def test_disabled_property(): class MyLogger(logbook.Logger): @property def disabled(self): return True logger = MyLogger() with pytest.raises(AttributeError): logger.enable() with pytest.raises(AttributeError): logger.disable() logbook-1.5.3/tests/test_logging_api.py000066400000000000000000000054071355165376200202310ustar00rootroot00000000000000import pickle import sys import logbook from logbook.helpers import iteritems, xrange, u import pytest def test_basic_logging(active_handler, logger): logger.warn('This is a warning. Nice hah?') assert active_handler.has_warning('This is a warning. Nice hah?') assert active_handler.formatted_records == [ '[WARNING] testlogger: This is a warning. 
Nice hah?'] def test_exception_catching(active_handler, logger): assert not active_handler.has_error() try: 1 / 0 except Exception: logger.exception() try: 1 / 0 except Exception: logger.exception('Awesome') assert active_handler.has_error('Uncaught exception occurred') assert active_handler.has_error('Awesome') assert active_handler.records[0].exc_info is not None assert '1 / 0' in active_handler.records[0].formatted_exception def test_exception_catching_with_unicode(): """ See https://github.com/getlogbook/logbook/issues/104 """ try: raise Exception(u('\u202a test \u202c')) except: r = logbook.LogRecord('channel', 'DEBUG', 'test', exc_info=sys.exc_info()) r.exception_message @pytest.mark.parametrize('as_tuple', [True, False]) def test_exc_info(as_tuple, logger, active_handler): try: 1 / 0 except Exception: exc_info = sys.exc_info() logger.info("Exception caught", exc_info=exc_info if as_tuple else True) assert active_handler.records[0].exc_info is not None assert active_handler.records[0].exc_info == exc_info def test_to_dict(logger, active_handler): try: 1 / 0 except Exception: logger.exception() record = active_handler.records[0] exported = record.to_dict() record.close() imported = logbook.LogRecord.from_dict(exported) for key, value in iteritems(record.__dict__): if key[0] == '_': continue assert value == getattr(imported, key) def test_pickle(active_handler, logger): try: 1 / 0 except Exception: logger.exception() record = active_handler.records[0] record.pull_information() record.close() for p in xrange(pickle.HIGHEST_PROTOCOL): exported = pickle.dumps(record, p) imported = pickle.loads(exported) for key, value in iteritems(record.__dict__): if key[0] == '_': continue imported_value = getattr(imported, key) if isinstance(value, ZeroDivisionError): # in Python 3.2, ZeroDivisionError(x) != ZeroDivisionError(x) assert type(value) is type(imported_value) assert value.args == imported_value.args else: assert value == imported_value 
logbook-1.5.3/tests/test_logging_compat.py000066400000000000000000000060631355165376200207420ustar00rootroot00000000000000import functools from random import randrange import logbook import logbook.compat from logbook.helpers import StringIO import pytest from .utils import capturing_stderr_context __file_without_pyc__ = __file__ if __file_without_pyc__.endswith(".pyc"): __file_without_pyc__ = __file_without_pyc__[:-1] @pytest.mark.parametrize('set_root_logger_level', [True, False]) def test_basic_compat(request, set_root_logger_level): import logging from logbook.compat import redirected_logging # mimic the default logging setting request.addfinalizer(functools.partial( logging.root.setLevel, logging.root.level)) logging.root.setLevel(logging.WARNING) name = 'test_logbook-%d' % randrange(1 << 32) logger = logging.getLogger(name) with logbook.TestHandler(bubble=True) as handler: with capturing_stderr_context() as captured: with redirected_logging(set_root_logger_level): logger.debug('This is from the old system') logger.info('This is from the old system') logger.warning('This is from the old %s', 'system') logger.error('This is from the old system') logger.critical('This is from the old system') logger.error('This is a %(what)s %(where)s', {'what': 'mapping', 'where': 'test'}) assert ('WARNING: %s: This is from the old system' % name) in captured.getvalue() assert ('ERROR: %s: This is a mapping test' % name) in captured.getvalue() if set_root_logger_level: assert handler.records[0].level == logbook.DEBUG else: assert handler.records[0].level == logbook.WARNING assert handler.records[0].msg == 'This is from the old %s' def test_redirect_logbook(): import logging out = StringIO() logger = logging.getLogger() logbook_logger = logbook.Logger('testlogger') old_handlers = logger.handlers[:] handler = logging.StreamHandler(out) handler.setFormatter(logging.Formatter( '%(name)s:%(levelname)s:%(message)s')) logger.handlers[:] = [handler] try: with 
logbook.compat.LoggingHandler(): logbook_logger.warn("This goes to logging") pieces = out.getvalue().strip().split(':') assert pieces == ['testlogger', 'WARNING', 'This goes to logging'] finally: logger.handlers[:] = old_handlers from itertools import count test_warning_redirections_i = count() def test_warning_redirections(): from logbook.compat import redirected_warnings with logbook.TestHandler() as handler: redirector = redirected_warnings() redirector.start() try: from warnings import warn, resetwarnings resetwarnings() warn(RuntimeWarning('Testing' + str(next(test_warning_redirections_i)))) finally: redirector.end() assert len(handler.records) == 1 assert handler.formatted_records[0].startswith( '[WARNING] RuntimeWarning: Testing') assert __file_without_pyc__ in handler.records[0].filename logbook-1.5.3/tests/test_logging_times.py000066400000000000000000000055511355165376200206010ustar00rootroot00000000000000from datetime import datetime, timedelta, tzinfo import logbook import pytest from .utils import get_total_delta_seconds def test_timedate_format(activation_strategy, logger): """ tests the logbook.set_datetime_format() function """ FORMAT_STRING = '{record.time:%H:%M:%S.%f} {record.message}' handler = logbook.TestHandler(format_string=FORMAT_STRING) with activation_strategy(handler): logbook.set_datetime_format('utc') try: logger.warn('This is a warning.') time_utc = handler.records[0].time logbook.set_datetime_format('local') logger.warn('This is a warning.') time_local = handler.records[1].time finally: # put back the default time factory logbook.set_datetime_format('utc') # get the expected difference between local and utc time t1 = datetime.now() t2 = datetime.utcnow() tz_minutes_diff = get_total_delta_seconds(t1 - t2)/60.0 if abs(tz_minutes_diff) < 1: pytest.skip('Cannot test utc/localtime differences ' 'if they vary by less than one minute...') # get the difference between LogRecord local and utc times logbook_minutes_diff = 
get_total_delta_seconds(time_local - time_utc)/60.0 assert abs(logbook_minutes_diff) > 1, ( 'Localtime does not differ from UTC by more than 1 ' 'minute (Local: %s, UTC: %s)' % (time_local, time_utc)) ratio = logbook_minutes_diff / tz_minutes_diff assert ratio > 0.99 assert ratio < 1.01 def test_tz_aware(activation_strategy, logger): """ tests logbook.set_datetime_format() with a time zone aware time factory """ class utc(tzinfo): def tzname(self, dt): return 'UTC' def utcoffset(self, dt): return timedelta(seconds=0) def dst(self, dt): return timedelta(seconds=0) utc = utc() def utc_tz(): return datetime.now(tz=utc) FORMAT_STRING = '{record.time:%H:%M:%S.%f%z} {record.message}' handler = logbook.TestHandler(format_string=FORMAT_STRING) with activation_strategy(handler): logbook.set_datetime_format(utc_tz) try: logger.warn('this is a warning.') record = handler.records[0] finally: # put back the default time factory logbook.set_datetime_format('utc') assert record.time.tzinfo is not None def test_invalid_time_factory(): """ tests logbook.set_datetime_format() with an invalid time factory callable """ def invalid_factory(): return False with pytest.raises(ValueError) as e: try: logbook.set_datetime_format(invalid_factory) finally: # put back the default time factory logbook.set_datetime_format('utc') assert 'Invalid callable value' in str(e.value) logbook-1.5.3/tests/test_mail_handler.py000066400000000000000000000207021355165376200203640ustar00rootroot00000000000000import base64 import re import sys import logbook from logbook.helpers import u from .utils import capturing_stderr_context, make_fake_mail_handler try: from unittest.mock import Mock, call, patch except ImportError: from mock import Mock, call, patch __file_without_pyc__ = __file__ if __file_without_pyc__.endswith('.pyc'): __file_without_pyc__ = __file_without_pyc__[:-1] def test_mail_handler(activation_strategy, logger): subject = u('\xf8nicode') handler = make_fake_mail_handler(subject=subject) with 
capturing_stderr_context() as fallback: with activation_strategy(handler): logger.warn('This is not mailed') try: 1 / 0 except Exception: logger.exception(u('Viva la Espa\xf1a')) if not handler.mails: # if sending the mail failed, the reason should be on stderr assert False, fallback.getvalue() assert len(handler.mails) == 1 sender, receivers, mail = handler.mails[0] mail = mail.replace('\r', '') assert sender == handler.from_addr assert '=?utf-8?q?=C3=B8nicode?=' in mail header, data = mail.split('\n\n', 1) if 'Content-Transfer-Encoding: base64' in header: data = base64.b64decode(data).decode('utf-8') assert re.search(r'Message type:\s+ERROR', data) assert re.search(r'Location:.*%s' % re.escape(__file_without_pyc__), data) assert re.search(r'Module:\s+%s' % __name__, data) assert re.search(r'Function:\s+test_mail_handler', data) body = u('Viva la Espa\xf1a') if sys.version_info < (3, 0): body = body.encode('utf-8') assert body in data assert '\nTraceback (most' in data assert '1 / 0' in data assert 'This is not mailed' in fallback.getvalue() def test_mail_handler_batching(activation_strategy, logger): mail_handler = make_fake_mail_handler() handler = logbook.FingersCrossedHandler(mail_handler, reset=True) with activation_strategy(handler): logger.warn('Testing') logger.debug('Even more') logger.error('And this triggers it') logger.info('Aha') logger.error('And this triggers it again!') assert len(mail_handler.mails) == 2 mail = mail_handler.mails[0][2] pieces = mail.split('Log records that led up to this one:') assert len(pieces) == 2 body, rest = pieces rest = rest.replace('\r', '') assert re.search(r'Message type:\s+ERROR', body) assert re.search(r'Module:\s+%s' % __name__, body) assert re.search(r'Function:\s+test_mail_handler_batching', body) related = rest.strip().split('\n\n') assert len(related) == 2 assert re.search(r'Message type:\s+WARNING', related[0]) assert re.search(r'Message type:\s+DEBUG', related[1]) assert 'And this triggers it again' in 
mail_handler.mails[1][2] def test_group_handler_mail_combo(activation_strategy, logger): mail_handler = make_fake_mail_handler(level=logbook.DEBUG) handler = logbook.GroupHandler(mail_handler) with activation_strategy(handler): logger.error('The other way round') logger.warn('Testing') logger.debug('Even more') assert mail_handler.mails == [] assert len(mail_handler.mails) == 1 mail = mail_handler.mails[0][2] pieces = mail.split('Other log records in the same group:') assert len(pieces) == 2 body, rest = pieces rest = rest.replace('\r', '') assert re.search(r'Message type:\s+ERROR', body) assert re.search(r'Module:\s+' + __name__, body) assert re.search(r'Function:\s+test_group_handler_mail_combo', body) related = rest.strip().split('\n\n') assert len(related) == 2 assert re.search(r'Message type:\s+WARNING', related[0]) assert re.search(r'Message type:\s+DEBUG', related[1]) def test_mail_handler_arguments(): with patch('smtplib.SMTP', autospec=True) as mock_smtp: # Test the mail handler with supported arguments before changes to # secure, credentials, and starttls mail_handler = logbook.MailHandler( from_addr='from@example.com', recipients='to@example.com', server_addr=('server.example.com', 465), credentials=('username', 'password'), secure=('keyfile', 'certfile')) mail_handler.get_connection() assert mock_smtp.call_args == call('server.example.com', 465) assert mock_smtp.method_calls[1] == call().starttls( keyfile='keyfile', certfile='certfile') assert mock_smtp.method_calls[3] == call().login('username', 'password') # Test secure=() mail_handler = logbook.MailHandler( from_addr='from@example.com', recipients='to@example.com', server_addr=('server.example.com', 465), credentials=('username', 'password'), secure=()) mail_handler.get_connection() assert mock_smtp.call_args == call('server.example.com', 465) assert mock_smtp.method_calls[5] == call().starttls( certfile=None, keyfile=None) assert mock_smtp.method_calls[7] == call().login('username', 'password') # 
Test implicit port with string server_addr, dictionary credentials, # dictionary secure. mail_handler = logbook.MailHandler( from_addr='from@example.com', recipients='to@example.com', server_addr='server.example.com', credentials={'user': 'username', 'password': 'password'}, secure={'certfile': 'certfile2', 'keyfile': 'keyfile2'}) mail_handler.get_connection() assert mock_smtp.call_args == call('server.example.com', 465) assert mock_smtp.method_calls[9] == call().starttls( certfile='certfile2', keyfile='keyfile2') assert mock_smtp.method_calls[11] == call().login( user='username', password='password') # Test secure=True mail_handler = logbook.MailHandler( from_addr='from@example.com', recipients='to@example.com', server_addr=('server.example.com', 465), credentials=('username', 'password'), secure=True) mail_handler.get_connection() assert mock_smtp.call_args == call('server.example.com', 465) assert mock_smtp.method_calls[13] == call().starttls( certfile=None, keyfile=None) assert mock_smtp.method_calls[15] == call().login('username', 'password') assert len(mock_smtp.method_calls) == 16 # Test secure=False mail_handler = logbook.MailHandler( from_addr='from@example.com', recipients='to@example.com', server_addr=('server.example.com', 465), credentials=('username', 'password'), secure=False) mail_handler.get_connection() # starttls not called because we check len of method_calls before and # after this test. 
assert mock_smtp.call_args == call('server.example.com', 465) assert mock_smtp.method_calls[16] == call().login('username', 'password') assert len(mock_smtp.method_calls) == 17 with patch('smtplib.SMTP_SSL', autospec=True) as mock_smtp_ssl: # Test starttls=False mail_handler = logbook.MailHandler( from_addr='from@example.com', recipients='to@example.com', server_addr='server.example.com', credentials={'user': 'username', 'password': 'password'}, secure={'certfile': 'certfile', 'keyfile': 'keyfile'}, starttls=False) mail_handler.get_connection() assert mock_smtp_ssl.call_args == call( 'server.example.com', 465, keyfile='keyfile', certfile='certfile') assert mock_smtp_ssl.method_calls[0] == call().login( user='username', password='password') # Test starttls=False with secure=True mail_handler = logbook.MailHandler( from_addr='from@example.com', recipients='to@example.com', server_addr='server.example.com', credentials={'user': 'username', 'password': 'password'}, secure=True, starttls=False) mail_handler.get_connection() assert mock_smtp_ssl.call_args == call( 'server.example.com', 465, keyfile=None, certfile=None) assert mock_smtp_ssl.method_calls[1] == call().login( user='username', password='password') logbook-1.5.3/tests/test_more.py000066400000000000000000000156151355165376200167160ustar00rootroot00000000000000import sys import logbook from logbook.helpers import StringIO import pytest from .utils import capturing_stderr_context, missing, require_module @require_module('jinja2') def test_jinja_formatter(logger): from logbook.more import JinjaFormatter fmter = JinjaFormatter('{{ record.channel }}/{{ record.level_name }}') handler = logbook.TestHandler() handler.formatter = fmter with handler: logger.info('info') assert 'testlogger/INFO' in handler.formatted_records @missing('jinja2') def test_missing_jinja2(): from logbook.more import JinjaFormatter # check the RuntimeError is raised with pytest.raises(RuntimeError): JinjaFormatter('dummy') def 
test_colorizing_support(logger): from logbook.more import ColorizedStderrHandler class TestColorizingHandler(ColorizedStderrHandler): def __init__(self, *args, **kwargs): super(TestColorizingHandler, self).__init__(*args, **kwargs) self._obj_stream = StringIO() @property def stream(self): return self._obj_stream with TestColorizingHandler(format_string='{record.message}') as handler: handler.force_color() logger.error('An error') logger.warn('A warning') logger.debug('A debug message') lines = handler.stream.getvalue().rstrip('\n').splitlines() assert lines == [ '\x1b[31;01mAn error\x1b[39;49;00m', '\x1b[33;01mA warning\x1b[39;49;00m', '\x1b[37mA debug message\x1b[39;49;00m'] with TestColorizingHandler(format_string='{record.message}') as handler: handler.forbid_color() logger.error('An error') logger.warn('A warning') logger.debug('A debug message') lines = handler.stream.getvalue().rstrip('\n').splitlines() assert lines == ['An error', 'A warning', 'A debug message'] def test_tagged(default_handler): from logbook.more import TaggingLogger, TaggingHandler stream = StringIO() second_handler = logbook.StreamHandler(stream) logger = TaggingLogger('name', ['cmd']) handler = TaggingHandler(dict( info=default_handler, cmd=second_handler, both=[default_handler, second_handler], )) handler.bubble = False with handler: with capturing_stderr_context() as captured: logger.log('info', 'info message') logger.log('both', 'all message') logger.cmd('cmd message') stderr = captured.getvalue() assert 'info message' in stderr assert 'all message' in stderr assert 'cmd message' not in stderr stringio = stream.getvalue() assert 'info message' not in stringio assert 'all message' in stringio assert 'cmd message' in stringio def test_tagging_logger(default_handler): from logbook import StderrHandler from logbook.more import TaggingLogger logger = TaggingLogger('tagged', ['a', 'b']) handler = StderrHandler(format_string="{record.msg}|{record.extra[tags]}") with handler: with 
capturing_stderr_context() as captured: logger.a("a") logger.b("b") stderr = captured.getvalue() assert "a|['a']" in stderr assert "a|['b']" not in stderr assert "b|['b']" in stderr assert "b|['a']" not in stderr def test_external_application_handler(tmpdir, logger): from logbook.more import ExternalApplicationHandler as Handler fn = tmpdir.join('tempfile') handler = Handler([sys.executable, '-c', r'''if 1: f = open(%(tempfile)s, 'w') try: f.write('{record.message}\n') finally: f.close() ''' % {'tempfile': repr(str(fn))}]) with handler: logger.error('this is a really bad idea') with fn.open() as rf: contents = rf.read().strip() assert contents == 'this is a really bad idea' def test_exception_handler(logger): from logbook.more import ExceptionHandler with ExceptionHandler(ValueError): with pytest.raises(ValueError) as caught: logger.info('here i am') assert 'INFO: testlogger: here i am' in caught.value.args[0] def test_exception_handler_specific_level(logger): from logbook.more import ExceptionHandler with logbook.TestHandler() as test_handler: with pytest.raises(ValueError) as caught: with ExceptionHandler(ValueError, level='WARNING'): logger.info('this is irrelevant') logger.warn('here i am') assert 'WARNING: testlogger: here i am' in caught.value.args[0] assert 'this is irrelevant' in test_handler.records[0].message def test_dedup_handler(logger): from logbook.more import DedupHandler with logbook.TestHandler() as test_handler: with DedupHandler(): logger.info('foo') logger.info('bar') logger.info('foo') assert 2 == len(test_handler.records) assert 'message repeated 2 times: foo' in test_handler.records[0].message assert 'message repeated 1 times: bar' in test_handler.records[1].message class TestRiemannHandler(object): @require_module("riemann_client") def test_happy_path(self, logger): from logbook.more import RiemannHandler riemann_handler = RiemannHandler("127.0.0.1", 5555, message_type="test", level=logbook.INFO) null_handler = logbook.NullHandler() with 
null_handler.applicationbound(): with riemann_handler: logger.error("Something bad has happened") try: raise RuntimeError("For example, a RuntimeError") except Exception as ex: logger.exception(ex) logger.info("But now it is ok") q = riemann_handler.queue assert len(q) == 3 error_event = q[0] assert error_event["state"] == "error" exc_event = q[1] assert exc_event["description"] == "For example, a RuntimeError" info_event = q[2] assert info_event["state"] == "ok" @require_module("riemann_client") def test_incorrect_type(self): from logbook.more import RiemannHandler with pytest.raises(RuntimeError): RiemannHandler("127.0.0.1", 5555, message_type="fancy_type") @require_module("riemann_client") def test_flush(self, logger): from logbook.more import RiemannHandler riemann_handler = RiemannHandler("127.0.0.1", 5555, message_type="test", flush_threshold=2, level=logbook.INFO) null_handler = logbook.NullHandler() with null_handler.applicationbound(): with riemann_handler: logger.info("Msg #1") logger.info("Msg #2") logger.info("Msg #3") q = riemann_handler.queue assert len(q) == 1 assert q[0]["description"] == "Msg #3" logbook-1.5.3/tests/test_nteventlog_handler.py000066400000000000000000000036271355165376200216360ustar00rootroot00000000000000import os import logbook import pytest from .utils import require_module @require_module('win32con') @require_module('win32evtlog') @require_module('win32evtlogutil') @pytest.mark.skipif(os.environ.get('ENABLE_LOGBOOK_NTEVENTLOG_TESTS') is None, reason="Don't clutter NT Event Log unless enabled.") def test_nteventlog_handler(): from win32con import ( EVENTLOG_ERROR_TYPE, EVENTLOG_INFORMATION_TYPE, EVENTLOG_WARNING_TYPE) from win32evtlog import ( EVENTLOG_BACKWARDS_READ, EVENTLOG_SEQUENTIAL_READ, OpenEventLog, ReadEventLog) from win32evtlogutil import SafeFormatMessage logger = logbook.Logger('Test Logger') with logbook.NTEventLogHandler('Logbook Test Suite'): logger.info('The info log message.') logger.warning('The warning log 
message.') logger.error('The error log message.') def iter_event_log(handle, flags, offset): while True: events = ReadEventLog(handle, flags, offset) for event in events: yield event if not events: break handle = OpenEventLog(None, 'Application') flags = EVENTLOG_BACKWARDS_READ | EVENTLOG_SEQUENTIAL_READ for event in iter_event_log(handle, flags, 0): source = str(event.SourceName) if source == 'Logbook Test Suite': message = SafeFormatMessage(event, 'Application') if 'Message Level: INFO' in message: assert 'The info log message' in message assert event.EventType == EVENTLOG_INFORMATION_TYPE if 'Message Level: WARNING' in message: assert 'The warning log message' in message assert event.EventType == EVENTLOG_WARNING_TYPE if 'Message Level: ERROR' in message: assert 'The error log message' in message assert event.EventType == EVENTLOG_ERROR_TYPE logbook-1.5.3/tests/test_null_handler.py000066400000000000000000000032451355165376200204170ustar00rootroot00000000000000import logbook from .utils import capturing_stderr_context def test_null_handler(activation_strategy, logger): with capturing_stderr_context() as captured: with activation_strategy(logbook.NullHandler()): with activation_strategy(logbook.TestHandler(level='ERROR')) as handler: logger.error('An error') logger.warn('A warning') assert captured.getvalue() == '' assert (not handler.has_warning('A warning')) assert handler.has_error('An error') def test_blackhole_setting(activation_strategy): null_handler = logbook.NullHandler() heavy_init = logbook.LogRecord.heavy_init with activation_strategy(null_handler): def new_heavy_init(self): raise RuntimeError('should not be triggered') logbook.LogRecord.heavy_init = new_heavy_init try: with activation_strategy(null_handler): logbook.warn('Awesome') finally: logbook.LogRecord.heavy_init = heavy_init null_handler.bubble = True with capturing_stderr_context() as captured: logbook.warning('Not a blockhole') assert captured.getvalue() != '' def 
test_null_handler_filtering(activation_strategy): logger1 = logbook.Logger("1") logger2 = logbook.Logger("2") outer = logbook.TestHandler() inner = logbook.NullHandler() inner.filter = lambda record, handler: record.dispatcher is logger1 with activation_strategy(outer): with activation_strategy(inner): logger1.warn("1") logger2.warn("2") assert outer.has_warning('2', channel='2') assert (not outer.has_warning('1', channel='1')) logbook-1.5.3/tests/test_processors.py000066400000000000000000000064501355165376200201530ustar00rootroot00000000000000from textwrap import dedent import logbook from .utils import make_fake_mail_handler def test_handler_filter_after_processor(activation_strategy, logger): handler = make_fake_mail_handler( format_string=dedent(''' Subject: Application Error for {record.extra[path]} [{record.extra[method]}] Message type: {record.level_name} Location: {record.filename}:{record.lineno} Module: {record.module} Function: {record.func_name} Time: {record.time:%Y-%m-%d %H:%M:%S} Remote IP: {record.extra[ip]} Request: {record.extra[path]} [{record.extra[method]}] Message: {record.message} ''').lstrip(), filter=lambda r, h: 'ip' in r.extra, bubble=False) class Request(object): remote_addr = '127.0.0.1' method = 'GET' path = '/index.html' def handle_request(request): def inject_extra(record): record.extra['ip'] = request.remote_addr record.extra['method'] = request.method record.extra['path'] = request.path processor = logbook.Processor(inject_extra) with activation_strategy(processor): handler.push_thread() try: try: 1 / 0 except Exception: logger.exception('Exception happened during request') finally: handler.pop_thread() handle_request(Request()) assert len(handler.mails) == 1 mail = handler.mails[0][2] assert 'Subject: Application Error for /index.html [GET]' in mail assert '1 / 0' in mail def test_handler_processors(activation_strategy, logger): handler = make_fake_mail_handler( format_string=dedent(''' Subject: Application Error for 
{record.extra[path]} [{record.extra[method]}] Message type: {record.level_name} Location: {record.filename}:{record.lineno} Module: {record.module} Function: {record.func_name} Time: {record.time:%Y-%m-%d %H:%M:%S} Remote IP: {record.extra[ip]} Request: {record.extra[path]} [{record.extra[method]}] Message: {record.message} ''').lstrip()) class Request(object): remote_addr = '127.0.0.1' method = 'GET' path = '/index.html' def handle_request(request): def inject_extra(record): record.extra['ip'] = request.remote_addr record.extra['method'] = request.method record.extra['path'] = request.path processor = logbook.Processor(inject_extra) with activation_strategy(processor): handler.push_thread() try: try: 1 / 0 except Exception: logger.exception('Exception happened during request') finally: handler.pop_thread() handle_request(Request()) assert len(handler.mails) == 1 mail = handler.mails[0][2] assert 'Subject: Application Error for /index.html [GET]' in mail assert '1 / 0' in mail logbook-1.5.3/tests/test_queues.py000066400000000000000000000260501355165376200172560ustar00rootroot00000000000000# -*- coding: utf-8 -*- import os import socket import time from .utils import require_module, missing, LETTERS import logbook from logbook.helpers import u import pytest @require_module('zmq') def test_zeromq_handler(logger, handlers, subscriber): tests = [ u('Logging something'), u('Something with umlauts äöü'), u('Something else for good measure'), ] for test in tests: for handler in handlers: with handler: logger.warn(test) record = subscriber.recv() assert record.message == test assert record.channel == logger.name @require_module('zmq') def test_zeromq_background_thread(logger, handlers, subscriber): test_handler = logbook.TestHandler() controller = subscriber.dispatch_in_background(test_handler) for handler in handlers: with handler: logger.warn('This is a warning') logger.error('This is an error') # stop the controller. 
This will also stop the loop and join the # background process. Before that we give it a fraction of a second # to get all results time.sleep(0.5) controller.stop() assert test_handler.has_warning('This is a warning') assert test_handler.has_error('This is an error') @missing('zmq') def test_missing_zeromq(): from logbook.queues import ZeroMQHandler, ZeroMQSubscriber with pytest.raises(RuntimeError): ZeroMQHandler('tcp://127.0.0.1:42000') with pytest.raises(RuntimeError): ZeroMQSubscriber('tcp://127.0.0.1:42000') class MultiProcessingHandlerSendBack(object): def __init__(self, queue): self.queue = queue def __call__(self): from logbook.queues import MultiProcessingHandler handler = MultiProcessingHandler(self.queue) handler.push_thread() try: logbook.warn('Hello World') finally: handler.pop_thread() @require_module('multiprocessing') def test_multi_processing_handler(): if os.getenv('APPVEYOR') == 'True': pytest.skip('Test hangs on AppVeyor CI') from multiprocessing import Process, Queue from logbook.queues import MultiProcessingSubscriber queue = Queue(-1) test_handler = logbook.TestHandler() subscriber = MultiProcessingSubscriber(queue) p = Process(target=MultiProcessingHandlerSendBack(queue)) p.start() p.join() with test_handler: subscriber.dispatch_once() assert test_handler.has_warning('Hello World') class BatchTestHandler(logbook.TestHandler): def __init__(self, *args, **kwargs): super(BatchTestHandler, self).__init__(*args, **kwargs) self.batches = [] def emit(self, record): super(BatchTestHandler, self).emit(record) self.batches.append([record]) def emit_batch(self, records, reason): for record in records: super(BatchTestHandler, self).emit(record) self.batches.append(records) def test_threaded_wrapper_handler(logger): from logbook.queues import ThreadedWrapperHandler test_handler = BatchTestHandler() with ThreadedWrapperHandler(test_handler) as handler: logger.warn('Just testing') logger.error('More testing') # give it some time to sync up handler.close() 
assert (not handler.controller.running) assert len(test_handler.records) == 2 assert len(test_handler.batches) == 2 assert all((len(records) == 1 for records in test_handler.batches)) assert test_handler.has_warning('Just testing') assert test_handler.has_error('More testing') def test_threaded_wrapper_handler_emit(): from logbook.queues import ThreadedWrapperHandler test_handler = BatchTestHandler() with ThreadedWrapperHandler(test_handler) as handler: lr = logbook.LogRecord('Test Logger', logbook.WARNING, 'Just testing') test_handler.emit(lr) lr = logbook.LogRecord('Test Logger', logbook.ERROR, 'More testing') test_handler.emit(lr) # give it some time to sync up handler.close() assert (not handler.controller.running) assert len(test_handler.records) == 2 assert len(test_handler.batches) == 2 assert all((len(records) == 1 for records in test_handler.batches)) assert test_handler.has_warning('Just testing') assert test_handler.has_error('More testing') def test_threaded_wrapper_handler_emit_batched(): from logbook.queues import ThreadedWrapperHandler test_handler = BatchTestHandler() with ThreadedWrapperHandler(test_handler) as handler: test_handler.emit_batch([ logbook.LogRecord('Test Logger', logbook.WARNING, 'Just testing'), logbook.LogRecord('Test Logger', logbook.ERROR, 'More testing'), ], 'group') # give it some time to sync up handler.close() assert (not handler.controller.running) assert len(test_handler.records) == 2 assert len(test_handler.batches) == 1 (records, ) = test_handler.batches assert len(records) == 2 assert test_handler.has_warning('Just testing') assert test_handler.has_error('More testing') @require_module('execnet') def test_execnet_handler(): def run_on_remote(channel): import logbook from logbook.queues import ExecnetChannelHandler handler = ExecnetChannelHandler(channel) log = logbook.Logger('Execnet') handler.push_application() log.info('Execnet works') import execnet gw = execnet.makegateway() channel = gw.remote_exec(run_on_remote) 
from logbook.queues import ExecnetChannelSubscriber subscriber = ExecnetChannelSubscriber(channel) record = subscriber.recv() assert record.msg == 'Execnet works' gw.exit() class SubscriberGroupSendBack(object): def __init__(self, message, queue): self.message = message self.queue = queue def __call__(self): from logbook.queues import MultiProcessingHandler with MultiProcessingHandler(self.queue): logbook.warn(self.message) @require_module('multiprocessing') def test_subscriber_group(): if os.getenv('APPVEYOR') == 'True': pytest.skip('Test hangs on AppVeyor CI') from multiprocessing import Process, Queue from logbook.queues import MultiProcessingSubscriber, SubscriberGroup a_queue = Queue(-1) b_queue = Queue(-1) subscriber = SubscriberGroup([ MultiProcessingSubscriber(a_queue), MultiProcessingSubscriber(b_queue) ]) for _ in range(10): p1 = Process(target=SubscriberGroupSendBack('foo', a_queue)) p2 = Process(target=SubscriberGroupSendBack('bar', b_queue)) p1.start() p2.start() p1.join() p2.join() messages = [subscriber.recv().message for i in (1, 2)] assert sorted(messages) == ['bar', 'foo'] @require_module('redis') def test_redis_handler(): import redis from logbook.queues import RedisHandler KEY = 'redis-{}'.format(os.getpid()) FIELDS = ['message', 'host'] r = redis.Redis(decode_responses=True) redis_handler = RedisHandler(key=KEY, level=logbook.INFO, bubble=True) # We don't want output for the tests, so we can wrap everything in a # NullHandler null_handler = logbook.NullHandler() # Check default values with null_handler.applicationbound(): with redis_handler: logbook.info(LETTERS) key, message = r.blpop(KEY) # Are all the fields in the record? 
for field in FIELDS: assert message.find(field) assert key == KEY assert message.find(LETTERS) # Change the key of the handler and check on redis KEY = 'test_another_key-{}'.format(os.getpid()) redis_handler.key = KEY with null_handler.applicationbound(): with redis_handler: logbook.info(LETTERS) key, message = r.blpop(KEY) assert key == KEY # Check that extra fields are added if specified when creating the handler FIELDS.append('type') extra_fields = {'type': 'test'} del(redis_handler) redis_handler = RedisHandler(key=KEY, level=logbook.INFO, extra_fields=extra_fields, bubble=True) with null_handler.applicationbound(): with redis_handler: logbook.info(LETTERS) key, message = r.blpop(KEY) for field in FIELDS: assert message.find(field) assert message.find('test') # And finally, check that fields are correctly added if appended to the # log message FIELDS.append('more_info') with null_handler.applicationbound(): with redis_handler: logbook.info(LETTERS, more_info='This works') key, message = r.blpop(KEY) for field in FIELDS: assert message.find(field) assert message.find('This works') @require_module('redis') def test_redis_handler_lpush(): """ Test if lpush stores messages in the right order new items should be first on list """ import redis from logbook.queues import RedisHandler null_handler = logbook.NullHandler() KEY = 'lpushed-'.format(os.getpid()) redis_handler = RedisHandler(key=KEY, push_method='lpush', level=logbook.INFO, bubble=True) with null_handler.applicationbound(): with redis_handler: logbook.info("old item") logbook.info("new item") time.sleep(1.5) r = redis.Redis(decode_responses=True) logs = r.lrange(KEY, 0, -1) assert logs assert "new item" in logs[0] r.delete(KEY) @require_module('redis') def test_redis_handler_rpush(): """ Test if rpush stores messages in the right order old items should be first on list """ import redis from logbook.queues import RedisHandler null_handler = logbook.NullHandler() KEY = 'rpushed-' + str(os.getpid()) 
redis_handler = RedisHandler(key=KEY, push_method='rpush', level=logbook.INFO, bubble=True) with null_handler.applicationbound(): with redis_handler: logbook.info("old item") logbook.info("new item") time.sleep(1.5) r = redis.Redis(decode_responses=True) logs = r.lrange(KEY, 0, -1) assert logs assert "old item" in logs[0] r.delete(KEY) @pytest.fixture def handlers(handlers_subscriber): return handlers_subscriber[0] @pytest.fixture def subscriber(handlers_subscriber): return handlers_subscriber[1] @pytest.fixture def handlers_subscriber(multi): from logbook.queues import ZeroMQHandler, ZeroMQSubscriber # Get an unused port tempsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) tempsock.bind(('127.0.0.1', 0)) host, unused_port = tempsock.getsockname() tempsock.close() # Retrieve the ZeroMQ handler and subscriber uri = 'tcp://%s:%d' % (host, unused_port) if multi: handlers = [ZeroMQHandler(uri, multi=True) for _ in range(3)] else: handlers = [ZeroMQHandler(uri)] subscriber = ZeroMQSubscriber(uri, multi=multi) # Enough time to start time.sleep(0.1) return handlers, subscriber @pytest.fixture(params=[True, False]) def multi(request): return request.param logbook-1.5.3/tests/test_syslog_handler.py000066400000000000000000000041421355165376200207620ustar00rootroot00000000000000import os import re import socket from contextlib import closing import logbook import pytest UNIX_SOCKET = "/tmp/__unixsock_logbook.test" DELIMITERS = { socket.AF_INET: '\n' } TO_TEST = [ (socket.AF_INET, socket.SOCK_DGRAM, ('127.0.0.1', 0)), (socket.AF_INET, socket.SOCK_STREAM, ('127.0.0.1', 0)), ] UNIX_SOCKET_AVAILABLE = hasattr(socket, 'AF_UNIX') if UNIX_SOCKET_AVAILABLE: DELIMITERS[socket.AF_UNIX] = '\x00' TO_TEST.append((socket.AF_UNIX, socket.SOCK_DGRAM, UNIX_SOCKET)) @pytest.mark.usefixtures("unix_sock_path") @pytest.mark.parametrize("sock_family,socktype,address", TO_TEST) @pytest.mark.parametrize("app_name", [None, 'Testing']) def test_syslog_handler(logger, activation_strategy, 
sock_family, socktype, address, app_name): delimiter = DELIMITERS[sock_family] with closing(socket.socket(sock_family, socktype)) as inc: inc.bind(address) if socktype == socket.SOCK_STREAM: inc.listen(0) inc.settimeout(1) if UNIX_SOCKET_AVAILABLE and sock_family == socket.AF_UNIX: expected = (r'^<12>%stestlogger: Syslog is weird%s$' % (app_name + ':' if app_name else '', delimiter)) else: expected = (r'^<12>1 \d{4}-\d\d-\d\dT\d\d:\d\d:\d\d(\.\d+)?Z %s %s %d - - %sSyslog is weird%s$' % ( socket.gethostname(), app_name if app_name else 'testlogger', os.getpid(), 'testlogger: ' if app_name else '', delimiter)) handler = logbook.SyslogHandler(app_name, inc.getsockname(), socktype=socktype) with activation_strategy(handler): logger.warn('Syslog is weird') if socktype == socket.SOCK_STREAM: with closing(inc.accept()[0]) as inc2: rv = inc2.recv(1024) else: rv = inc.recvfrom(1024)[0] rv = rv.decode('utf-8') assert re.match(expected, rv), \ 'expected {}, got {}'.format(expected, rv) @pytest.fixture def unix_sock_path(): try: yield UNIX_SOCKET finally: if os.path.exists(UNIX_SOCKET): os.unlink(UNIX_SOCKET) logbook-1.5.3/tests/test_test_handler.py000066400000000000000000000027771355165376200204350ustar00rootroot00000000000000import re import pytest @pytest.mark.parametrize("level, method", [ ("trace", "has_traces"), ("debug", "has_debugs"), ("info", "has_infos"), ("notice", "has_notices"), ("warning", "has_warnings"), ("error", "has_errors"), ("critical", "has_criticals"), ]) def test_has_level(active_handler, logger, level, method): log = getattr(logger, level) log('Hello World') assert getattr(active_handler, method) @pytest.mark.parametrize("level, method", [ ("trace", "has_trace"), ("debug", "has_debug"), ("info", "has_info"), ("notice", "has_notice"), ("warning", "has_warning"), ("error", "has_error"), ("critical", "has_critical"), ]) def test_regex_matching(active_handler, logger, level, method): log = getattr(logger, level) log('Hello World') has_level_method = 
getattr(active_handler, method) assert has_level_method(re.compile('^Hello')) assert (not has_level_method(re.compile('world$'))) assert (not has_level_method('^Hello World')) def test_test_handler_cache(active_handler, logger): logger.warn('First line') assert len(active_handler.formatted_records) == 1 # store cache, to make sure it is identifiable cache = active_handler.formatted_records assert len(active_handler.formatted_records) == 1 assert cache is active_handler.formatted_records logger.warn('Second line invalidates cache') assert len(active_handler.formatted_records) == 2 assert (cache is not active_handler.formatted_records) logbook-1.5.3/tests/test_ticketing.py000066400000000000000000000042431355165376200177300ustar00rootroot00000000000000import os import sys try: from thread import get_ident except ImportError: from _thread import get_ident import logbook import pytest from logbook.helpers import xrange from .utils import require_module __file_without_pyc__ = __file__ if __file_without_pyc__.endswith(".pyc"): __file_without_pyc__ = __file_without_pyc__[:-1] python_version = sys.version_info[:2] @pytest.mark.xfail( os.name == 'nt' and (python_version == (3, 2) or python_version == (3, 3)), reason='Problem with in-memory sqlite on Python 3.2, 3.3 and Windows') @require_module('sqlalchemy') def test_basic_ticketing(logger): from logbook.ticketing import TicketingHandler from time import sleep with TicketingHandler('sqlite:///') as handler: for x in xrange(5): logger.warn('A warning') sleep(0.2) logger.info('An error') sleep(0.2) if x < 2: try: 1 / 0 except Exception: logger.exception() assert handler.db.count_tickets() == 3 tickets = handler.db.get_tickets() assert len(tickets) == 3 assert tickets[0].level == logbook.INFO assert tickets[1].level == logbook.WARNING assert tickets[2].level == logbook.ERROR assert tickets[0].occurrence_count == 5 assert tickets[1].occurrence_count == 5 assert tickets[2].occurrence_count == 2 assert 
tickets[0].last_occurrence.level == logbook.INFO tickets[0].solve() assert tickets[0].solved tickets[0].delete() ticket = handler.db.get_ticket(tickets[1].ticket_id) assert ticket == tickets[1] occurrences = handler.db.get_occurrences(tickets[2].ticket_id, order_by='time') assert len(occurrences) == 2 record = occurrences[0] assert __file_without_pyc__ in record.filename # avoid 2to3 destroying our assertion assert getattr(record, 'func_name') == 'test_basic_ticketing' assert record.level == logbook.ERROR assert record.thread == get_ident() assert record.process == os.getpid() assert record.channel == 'testlogger' assert '1 / 0' in record.formatted_exception logbook-1.5.3/tests/test_unicode.py000066400000000000000000000042131355165376200173720ustar00rootroot00000000000000# -*- coding: utf-8 -*- from .utils import require_py3, capturing_stderr_context import logbook @require_py3 def test_default_format_unicode(logger): with capturing_stderr_context() as stream: logger.warn('\u2603') assert 'WARNING: testlogger: \u2603' in stream.getvalue() @require_py3 def test_default_format_encoded(logger): with capturing_stderr_context() as stream: # it's a string but it's in the right encoding so don't barf logger.warn('\u2603') assert 'WARNING: testlogger: \u2603' in stream.getvalue() @require_py3 def test_default_format_bad_encoding(logger): with capturing_stderr_context() as stream: # it's a string, is wrong, but just dump it in the logger, # don't try to decode/encode it logger.warn('Русский'.encode('koi8-r')) expected = "WARNING: testlogger: b'\\xf2\\xd5\\xd3\\xd3\\xcb\\xc9\\xca'" assert expected in stream.getvalue() @require_py3 def test_custom_unicode_format_unicode(logger): format_string = ('[{record.level_name}] ' '{record.channel}: {record.message}') with capturing_stderr_context() as stream: with logbook.StderrHandler(format_string=format_string): logger.warn("\u2603") assert '[WARNING] testlogger: \u2603' in stream.getvalue() @require_py3 def 
test_custom_string_format_unicode(logger): format_string = ('[{record.level_name}] ' '{record.channel}: {record.message}') with capturing_stderr_context() as stream: with logbook.StderrHandler(format_string=format_string): logger.warn('\u2603') assert '[WARNING] testlogger: \u2603' in stream.getvalue() @require_py3 def test_unicode_message_encoded_params(logger): with capturing_stderr_context() as stream: logger.warn("\u2603 {0}", "\u2603".encode('utf8')) assert "WARNING: testlogger: \u2603 b'\\xe2\\x98\\x83'" in stream.getvalue() @require_py3 def test_encoded_message_unicode_params(logger): with capturing_stderr_context() as stream: logger.warn('\u2603 {0}'.encode('utf8'), '\u2603') assert 'WARNING: testlogger: \u2603 \u2603' in stream.getvalue() logbook-1.5.3/tests/test_utils.py000066400000000000000000000125701355165376200171110ustar00rootroot00000000000000import pytest import logbook from logbook.utils import ( logged_if_slow, deprecated, forget_deprecation_locations, suppressed_deprecations, log_deprecation_message) from time import sleep _THRESHOLD = 0.1 try: from unittest.mock import Mock, call except ImportError: from mock import Mock, call def test_logged_if_slow_reached(test_handler): with test_handler.applicationbound(): with logged_if_slow('checking...', threshold=_THRESHOLD): sleep(2 * _THRESHOLD) assert len(test_handler.records) == 1 [record] = test_handler.records assert record.message == 'checking...' 
def test_logged_if_slow_did_not_reached(test_handler): with test_handler.applicationbound(): with logged_if_slow('checking...', threshold=_THRESHOLD): sleep(_THRESHOLD / 2) assert len(test_handler.records) == 0 def test_logged_if_slow_logger(): logger = Mock() with logged_if_slow('checking...', threshold=_THRESHOLD, logger=logger): sleep(2 * _THRESHOLD) assert logger.log.call_args == call(logbook.DEBUG, 'checking...') def test_logged_if_slow_level(test_handler): with test_handler.applicationbound(): with logged_if_slow('checking...', threshold=_THRESHOLD, level=logbook.WARNING): sleep(2 * _THRESHOLD) assert test_handler.records[0].level == logbook.WARNING def test_logged_if_slow_deprecated(logger, test_handler): with test_handler.applicationbound(): with logged_if_slow('checking...', threshold=_THRESHOLD, func=logbook.error): sleep(2 * _THRESHOLD) assert test_handler.records[0].level == logbook.ERROR assert test_handler.records[0].message == 'checking...' with pytest.raises(TypeError): logged_if_slow('checking...', logger=logger, func=logger.error) def test_deprecated_func_called(capture): assert deprecated_func(1, 2) == 3 def test_deprecation_message(capture): deprecated_func(1, 2) [record] = capture.records assert "deprecated" in record.message assert 'deprecated_func' in record.message def test_deprecation_with_message(capture): @deprecated("use something else instead") def func(a, b): return a + b func(1, 2) [record] = capture.records assert "use something else instead" in record.message assert "func is deprecated" in record.message def test_no_deprecations(capture): @deprecated('msg') def func(a, b): return a + b with suppressed_deprecations(): assert func(1, 2) == 3 assert not capture.records def _no_decorator(func): return func @pytest.mark.parametrize('decorator', [_no_decorator, classmethod]) def test_class_deprecation(capture, decorator): class Bla(object): @deprecated('reason') @classmethod def func(self, a, b): assert isinstance(self, Bla) return a + b 
assert Bla().func(2, 4) == 6 [record] = capture.records assert 'Bla.func is deprecated' in record.message def test_deprecations_different_sources(capture): def f(): deprecated_func(1, 2) def g(): deprecated_func(1, 2) f() g() assert len(capture.records) == 2 def test_deprecations_same_sources(capture): def f(): deprecated_func(1, 2) f() f() assert len(capture.records) == 1 def test_deprecation_message_different_sources(capture): def f(flag): if flag: log_deprecation_message('first message type') else: log_deprecation_message('second message type') f(True) f(False) assert len(capture.records) == 2 def test_deprecation_message_same_sources(capture): def f(flag): if flag: log_deprecation_message('first message type') else: log_deprecation_message('second message type') f(True) f(True) assert len(capture.records) == 1 def test_deprecation_message_full_warning(capture): def f(): log_deprecation_message('some_message') f() [record] = capture.records assert record.message == 'Deprecation message: some_message' def test_name_doc(): @deprecated def some_func(): """docstring here""" pass assert some_func.__name__ == 'some_func' assert 'docstring here' in some_func.__doc__ def test_doc_update(): @deprecated('some_message') def some_func(): """docstring here""" pass some_func.__doc__ = 'new_docstring' assert 'docstring here' not in some_func.__doc__ assert 'new_docstring' in some_func.__doc__ assert 'some_message' in some_func.__doc__ def test_deprecatd_docstring(): message = "Use something else instead" @deprecated() def some_func(): """This is a function """ @deprecated(message) def other_func(): """This is another function """ assert ".. deprecated" in some_func.__doc__ assert ".. 
deprecated\n {0}".format(message) in other_func.__doc__ @pytest.fixture def capture(request): handler = logbook.TestHandler(level=logbook.WARNING) handler.push_application() @request.addfinalizer def pop(): handler.pop_application() return handler @deprecated def deprecated_func(a, b): return a + b @pytest.fixture(autouse=True) def forget_locations(): forget_deprecation_locations() logbook-1.5.3/tests/utils.py000066400000000000000000000047131355165376200160520ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ test utils for logbook ~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010 by Armin Ronacher, Georg Brandl. :license: BSD, see LICENSE for more details. """ import functools import os import sys from contextlib import contextmanager import logbook from logbook.helpers import StringIO import pytest _missing = object() LETTERS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" def get_total_delta_seconds(delta): """ Replacement for datetime.timedelta.total_seconds() for Python 2.5, 2.6 and 3.1 """ return (delta.microseconds + (delta.seconds + delta.days * 24 * 3600) * 10**6) / 10**6 require_py3 = pytest.mark.skipif( sys.version_info[0] < 3, reason="Requires Python 3") appveyor = pytest.mark.skipif( os.environ.get('APPVEYOR') != 'True', reason='AppVeyor CI test') travis = pytest.mark.skipif( os.environ.get('TRAVIS') != 'true', reason='Travis CI test') def require_module(module_name): found = True try: __import__(module_name) except ImportError: found = False return pytest.mark.skipif( not found, reason='Module {0} is required'.format(module_name)) def make_fake_mail_handler(**kwargs): class FakeMailHandler(logbook.MailHandler): mails = [] def get_connection(self): return self def close_connection(self, con): pass def sendmail(self, fromaddr, recipients, mail): self.mails.append((fromaddr, recipients, mail)) kwargs.setdefault('level', logbook.ERROR) return FakeMailHandler('foo@example.com', ['bar@example.com'], **kwargs) def missing(name): def decorate(f): 
@functools.wraps(f) def wrapper(*args, **kwargs): old = sys.modules.get(name, _missing) sys.modules[name] = None try: f(*args, **kwargs) finally: if old is _missing: del sys.modules[name] else: sys.modules[name] = old return wrapper return decorate def activate_via_with_statement(handler): return handler @contextmanager def activate_via_push_pop(handler): handler.push_thread() try: yield handler finally: handler.pop_thread() @contextmanager def capturing_stderr_context(): original = sys.stderr sys.stderr = StringIO() try: yield sys.stderr finally: sys.stderr = original logbook-1.5.3/tox.ini000066400000000000000000000015571355165376200145140ustar00rootroot00000000000000[tox] envlist = py{27,35,36,37}{,-speedups},pypy,py37-docs skipsdist = True [testenv] whitelist_externals = rm deps = py{27}: mock pytest speedups: Cython setenv = !speedups: DISABLE_LOGBOOK_CEXT=1 !speedups: DISABLE_LOGBOOK_CEXT_AT_RUNTIME=1 changedir = {toxinidir} commands = {envpython} -m pip install -e {toxinidir}[all] # Make sure that speedups are available/not available, as needed. speedups: {envpython} -c "from logbook.base import _has_speedups; exit(0 if _has_speedups else 1)" !speedups: {envpython} -c "from logbook.base import _has_speedups; exit(1 if _has_speedups else 0)" {envpython} {toxinidir}/scripts/test_setup.py py.test {toxinidir}/tests [testenv:py37-docs] deps = Sphinx>=1.3 changedir = docs commands = sphinx-build -W -b html . _build/html sphinx-build -W -b linkcheck . _build/linkcheck logbook-1.5.3/twitter-secrets.txt000066400000000000000000000003661355165376200171070ustar00rootroot00000000000000Leaked Twitter Secrets Twitter for Android xauth: yes key: 3nVuSoBZnx6U4vzUxf5w secret: Bcs59EFbbsdF6Sl9Ng71smgStWEGwXXKSjYvPVt7qys Echofon: xauth: yes key: yqoymTNrS9ZDGsBnlFhIuw secret: OMai1whT3sT3XMskI7DZ7xiju5i5rAYJnxSEHaKYvEs