pax_global_header00006660000000000000000000000064140614063610014513gustar00rootroot0000000000000052 comment=3191886658f7717c00ec013eb778bc1ced5cef0c vdirsyncer-0.18.0/000077500000000000000000000000001406140636100137715ustar00rootroot00000000000000vdirsyncer-0.18.0/.builds/000077500000000000000000000000001406140636100153315ustar00rootroot00000000000000vdirsyncer-0.18.0/.builds/archlinux.yaml000066400000000000000000000015571406140636100202220ustar00rootroot00000000000000# Run tests using the packaged dependencies on ArchLinux. image: archlinux packages: - docker - docker-compose # Build dependencies: - python-pip - python-wheel # Runtime dependencies: - python-atomicwrites - python-click - python-click-log - python-click-threading - python-requests - python-requests-toolbelt # Test dependencies: - python-hypothesis - python-pytest-cov - python-pytest-localserver sources: - https://github.com/pimutils/vdirsyncer environment: BUILD: test CI: true CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79 DAV_SERVER: radicale xandikos REQUIREMENTS: release # TODO: ETESYNC_TESTS tasks: - setup: | sudo systemctl start docker cd vdirsyncer python setup.py build sudo pip install --no-index . - test: | cd vdirsyncer make -e ci-test make -e ci-test-storage vdirsyncer-0.18.0/.builds/tests-minimal.yaml000066400000000000000000000014001406140636100207760ustar00rootroot00000000000000# Run tests using oldest available dependency versions. # # TODO: It might make more sense to test with an older Ubuntu or Fedora version # here, and consider that our "oldest suppported environment". image: archlinux packages: - docker - docker-compose - python-pip sources: - https://github.com/pimutils/vdirsyncer environment: BUILD: test CI: true CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79 DAV_SERVER: radicale xandikos REQUIREMENTS: minimal # TODO: ETESYNC_TESTS tasks: - setup: | sudo systemctl start docker cd vdirsyncer make -e install-dev - test: | cd vdirsyncer # Non-system python is used for packages: export PATH=$PATH:~/.local/bin/ make -e ci-test make -e ci-test-storage vdirsyncer-0.18.0/.builds/tests-release.yaml000066400000000000000000000017441406140636100210030ustar00rootroot00000000000000# Run tests using latest dependencies from PyPI image: archlinux packages: - docker - docker-compose - python-pip - twine sources: - https://github.com/pimutils/vdirsyncer secrets: - a36c8ba3-fba0-4338-b402-6aea0fbe771e environment: BUILD: test CI: true CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79 DAV_SERVER: baikal radicale xandikos REQUIREMENTS: release # TODO: ETESYNC_TESTS tasks: - setup: | sudo systemctl start docker cd vdirsyncer make -e install-dev -e install-docs - test: | cd vdirsyncer # Non-system python is used for packages: export PATH=$PATH:~/.local/bin/ make -e ci-test make -e ci-test-storage - style: | cd vdirsyncer # Non-system python is used for packages: export PATH=$PATH:~/.local/bin/ make -e style git describe --exact-match --tags || complete-build - publish: | cd vdirsyncer python setup.py sdist bdist_wheel twine upload dist/* vdirsyncer-0.18.0/.codecov.yml000066400000000000000000000002531406140636100162140ustar00rootroot00000000000000comment: false coverage: status: patch: false project: unit: flags: unit system: flags: system storage: flags: storage vdirsyncer-0.18.0/.coveragerc000066400000000000000000000006771406140636100161240ustar00rootroot00000000000000[run] branch = True [paths] source = vdirsyncer/ [report] exclude_lines = # Have to re-enable the standard pragma pragma: no cover # Don't complain 
about missing debug-only code: def __repr__ if self\.debug # Don't complain if tests don't hit defensive assertion code: raise AssertionError raise NotImplementedError # Don't complain if non-runnable code isn't run: if 0: if __name__ == .__main__.: vdirsyncer-0.18.0/.github/000077500000000000000000000000001406140636100153315ustar00rootroot00000000000000vdirsyncer-0.18.0/.github/workflows/000077500000000000000000000000001406140636100173665ustar00rootroot00000000000000vdirsyncer-0.18.0/.github/workflows/publish.yml000066400000000000000000000016441406140636100215640ustar00rootroot00000000000000name: Publish on: push: tags: - 0.* jobs: github-release: runs-on: ubuntu-18.04 name: Publish GitHub Release steps: - uses: actions/checkout@master - uses: actions/setup-python@v1 with: python-version: 3.7 architecture: x64 - run: pip install wheel - run: python setup.py sdist bdist_wheel - uses: softprops/action-gh-release@v1 with: files: dist/* env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} pypi: runs-on: ubuntu-18.04 name: Publish package on PyPI steps: - uses: actions/checkout@master - uses: actions/setup-python@v1 with: python-version: 3.7 architecture: x64 - run: pip install wheel - run: python setup.py sdist bdist_wheel - uses: pypa/gh-action-pypi-publish@master with: password: ${{ secrets.PYPI_TOKEN }} vdirsyncer-0.18.0/.gitignore000066400000000000000000000002441406140636100157610ustar00rootroot00000000000000*.pyc __pycache__ htmlcov .coverage build env *.egg-info .cache .pytest_cache .eggs .egg .xprocess dist docs/_build/ vdirsyncer/version.py .hypothesis coverage.xml vdirsyncer-0.18.0/.pre-commit-config.yaml000066400000000000000000000012161406140636100202520ustar00rootroot00000000000000repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.0.1 hooks: - id: trailing-whitespace args: [--markdown-linebreak-ext=md] - id: end-of-file-fixer - id: check-toml - id: check-added-large-files - id: debug-statements - repo: https://gitlab.com/pycqa/flake8 rev: "3.9.2" hooks: - id: flake8 additional_dependencies: [flake8-import-order, flake8-bugbear] - repo: https://github.com/psf/black rev: "21.6b0" hooks: - id: black - repo: https://github.com/asottile/reorder_python_imports rev: v2.5.0 hooks: - id: reorder-python-imports vdirsyncer-0.18.0/AUTHORS.rst000066400000000000000000000007521406140636100156540ustar00rootroot00000000000000Contributors ============ In alphabetical order: - Ben Boeckel - Christian Geier - Clément Mondon - Hugo Osvaldo Barrera - Julian Mehne - Malte Kiefer - Marek Marczykowski-Górecki - Markus Unterwaditzer - Michael Adler - Thomas Weißschuh Special thanks goes to: * `FastMail `_ sponsors a paid account for testing their servers. * `Packagecloud `_ provide repositories for vdirsyncer's Debian packages. vdirsyncer-0.18.0/CHANGELOG.rst000066400000000000000000000503531406140636100160200ustar00rootroot00000000000000========= Changelog ========= This changelog only contains information that might be useful to end users and package maintainers. For further info, see the git commit log. Package maintainers and users who have to manually update their installation may want to subscribe to `GitHub's tag feed `_. Version 0.18.0 ============== Note: Version 0.17 has some alpha releases but ultimately was never finalised. 0.18 actually continues where 0.16 left off. - Support for Python 3.5 and 3.6 has been dropped. This release mostly focuses on keeping vdirsyncer compatible with newer environments. - click 8 and click-threading 0.5.0 are now required. 
- For those using ``pipsi``, we now recommend using ``pipx``, its successor.
- Python 3.9 is now supported.
- Our Debian/Ubuntu build scripts have been updated. New versions should be
  pushed to those repositories soon.

Version 0.16.8
==============

*released 09 June 2020*

- Support Python 3.7 and 3.8.

This release is functionally identical to 0.16.7. It's been tested with
recent Python versions, and has been marked as supporting them.

It will also be the final release supporting Python 3.5 and 3.6.

Version 0.16.7
==============

*released on 19 July 2018*

- Fixes for Python 3.7.

Version 0.16.6
==============

*released on 13 June 2018*

- **Packagers:** Documentation building no longer needs a working
  installation of vdirsyncer.

Version 0.16.5
==============

*released on 13 June 2018*

- **Packagers:** click-log 0.3 is required.
- All output will now happen on stderr (because of the upgrade of
  ``click-log``).

Version 0.16.4
==============

*released on 05 February 2018*

- Fix tests for new Hypothesis version. (Literally no other change included)

Version 0.16.3
==============

*released on 03 October 2017*

- First version with custom Debian and Ubuntu packages. See :gh:`663`.
- Remove invalid ASCII control characters from server responses. See
  :gh:`626`.
- **Packagers:** Python 3.3 is no longer supported. See :ghpr:`674`.

Version 0.16.2
==============

*released on 24 August 2017*

- Fix crash when using daterange or item_type filters in
  :storage:`google_calendar`, see :gh:`657`.
- **Packagers:** Fixes for new version ``0.2.0`` of ``click-log``. The
  version requirements for the dependency ``click-log`` changed.

Version 0.16.1
==============

*released on 8 August 2017*

- Removed remoteStorage support, see :gh:`647`.
- Fixed test failures caused by latest requests version, see :gh:`660`.

Version 0.16.0
==============

*released on 2 June 2017*

- Strip ``METHOD:PUBLISH`` added by some calendar providers, see :gh:`502`.
- Fix crash of Google storages when saving token file.
- Make DAV discovery more RFC-conformant, see :ghpr:`585`.
- Vdirsyncer is now tested against Xandikos, see :ghpr:`601`.
- Subfolders with a leading dot are now ignored during discover for
  ``filesystem`` storage. This makes it easier to combine it with version
  control.
- Statuses are now stored in a sqlite database. Old data is automatically
  migrated. Users with really large datasets should encounter performance
  improvements. This means that **sqlite3 is now a dependency of
  vdirsyncer**.
- **Vdirsyncer is now licensed under the 3-clause BSD license**, see
  :gh:`610`.
- Vdirsyncer now includes experimental support for `EteSync
  <https://www.etesync.com/>`_, see :ghpr:`614`.
- Vdirsyncer now uses more filesystem metadata for determining whether an
  item changed. You will notice a **possibly heavy CPU/IO spike on the first
  sync after upgrading**.
- **Packagers:** Reference ``systemd.service`` and ``systemd.timer`` unit
  files are provided. It is recommended to install these as documentation if
  your distribution is systemd-based.

Version 0.15.0
==============

*released on 28 February 2017*

- Deprecated syntax for configuration values is now completely rejected. All
  values now have to be valid JSON.
- A few UX improvements for Google storages, see :gh:`549` and :gh:`552`.
- Fix collection discovery for :storage:`google_contacts`, see :gh:`564`.
- iCloud is now tested on Travis, see :gh:`567`.

Version 0.14.1
==============

*released on 05 January 2017*

- ``vdirsyncer repair`` no longer changes "unsafe" UIDs by default, an extra
  option has to be specified.
See :gh:`527`. - A lot of important documentation updates. Version 0.14.0 ============== *released on 26 October 2016* - ``vdirsyncer sync`` now continues other uploads if one upload failed. The exit code in such situations is still non-zero. - Add ``partial_sync`` option to pair section. See :ref:`the config docs `. - Vdirsyncer will now warn if there's a string without quotes in your config. Please file issues if you find documentation that uses unquoted strings. - Fix an issue that would break khal's config setup wizard. Version 0.13.1 ============== *released on 30 September 2016* - Fix a bug that would completely break collection discovery. Version 0.13.0 ============== *released on 29 September 2016* - Python 2 is no longer supported at all. See :gh:`219`. - Config sections are now checked for duplicate names. This also means that you cannot have a storage section ``[storage foo]`` and a pair ``[pair foo]`` in your config, they have to have different names. This is done such that console output is always unambiguous. See :gh:`459`. - Custom commands can now be used for conflict resolution during sync. See :gh:`127`. - :storage:`http` now completely ignores UIDs. This avoids a lot of unnecessary down- and uploads. Version 0.12.1 ============== *released on 20 August 2016* - Fix a crash for Google and DAV storages. See :ghpr:`492`. - Fix an URL-encoding problem with DavMail. See :gh:`491`. Version 0.12 ============ *released on 19 August 2016* - :storage:`singlefile` now supports collections. See :ghpr:`488`. Version 0.11.3 ============== *released on 29 July 2016* - Default value of ``auth`` parameter was changed from ``guess`` to ``basic`` to resolve issues with the Apple Calendar Server (:gh:`457`) and improve performance. See :gh:`461`. - **Packagers:** The ``click-threading`` requirement is now ``>=0.2``. It was incorrect before. See :gh:`478`. - Fix a bug in the DAV XML parsing code that would make vdirsyncer crash on certain input. See :gh:`480`. - Redirect chains should now be properly handled when resolving ``well-known`` URLs. See :ghpr:`481`. Version 0.11.2 ============== *released on 15 June 2016* - Fix typo that would break tests. Version 0.11.1 ============== *released on 15 June 2016* - Fix a bug in collection validation. - Fix a cosmetic bug in debug output. - Various documentation improvements. Version 0.11.0 ============== *released on 19 May 2016* - Discovery is no longer automatically done when running ``vdirsyncer sync``. ``vdirsyncer discover`` now has to be explicitly called. - Add a ``.plist`` example for Mac OS X. - Usage under Python 2 now requires a special config parameter to be set. - Various deprecated configuration parameters do no longer have specialized errormessages. The generic error message for unknown parameters is shown. - Vdirsyncer no longer warns that the ``passwordeval`` parameter has been renamed to ``password_command``. - The ``keyring`` fetching strategy has been dropped some versions ago, but the specialized error message has been dropped. - An old status format from version 0.4 is no longer supported. If you're experiencing problems, just delete your status folder. Version 0.10.0 ============== *released on 23 April 2016* - New storage types :storage:`google_calendar` and :storage:`google_contacts` have been added. - New global command line option `--config`, to specify an alternative config file. See :gh:`409`. - The ``collections`` parameter can now be used to synchronize differently-named collections with each other. 
- **Packagers:** The ``lxml`` dependency has been dropped. - XML parsing is now a lot stricter. Malfunctioning servers that used to work with vdirsyncer may stop working. Version 0.9.3 ============= *released on 22 March 2016* - :storage:`singlefile` and :storage:`http` now handle recurring events properly. - Fix a typo in the packaging guidelines. - Moved to ``pimutils`` organization on GitHub. Old links *should* redirect, but be aware of client software that doesn't properly handle redirects. Version 0.9.2 ============= *released on 13 March 2016* - Fixed testsuite for environments that don't have any web browser installed. See :ghpr:`384`. Version 0.9.1 ============= *released on 13 March 2016* - Removed leftover debug print statement in ``vdirsyncer discover``, see commit ``3d856749f37639821b148238ef35f1acba82db36``. - ``metasync`` will now strip whitespace from the start and the end of the values. See :gh:`358`. - New ``Packaging Guidelines`` have been added to the documentation. Version 0.9.0 ============= *released on 15 February 2016* - The ``collections`` parameter is now required in pair configurations. Vdirsyncer will tell you what to do in its error message. See :gh:`328`. Version 0.8.1 ============= *released on 30 January 2016* - Fix error messages when invalid parameter fetching strategy is used. This is important because users would receive awkward errors for using deprecated ``keyring`` fetching. Version 0.8.0 ============= *released on 27 January 2016* - Keyring support has been removed, which means that ``password.fetch = ["keyring", "example.com", "myuser"]`` doesn't work anymore. For existing setups: Use ``password.fetch = ["command", "keyring", "get", "example.com", "myuser"]`` instead, which is more generic. See the documentation for details. - Now emitting a warning when running under Python 2. See :gh:`219`. Version 0.7.5 ============= *released on 23 December 2015* - Fixed a bug in :storage:`remotestorage` that would try to open a CLI browser for OAuth. - Fix a packaging bug that would prevent vdirsyncer from working with newer lxml versions. Version 0.7.4 ============= *released on 22 December 2015* - Improved error messages instead of faulty server behavior, see :gh:`290` and :gh:`300`. - Safer shutdown of threadpool, avoid exceptions, see :gh:`291`. - Fix a sync bug for read-only storages see commit ``ed22764921b2e5bf6a934cf14aa9c5fede804d8e``. - Etag changes are no longer sufficient to trigger sync operations. An actual content change is also necessary. See :gh:`257`. - :storage:`remotestorage` now automatically opens authentication dialogs in your configured GUI browser. - **Packagers:** ``lxml>=3.1`` is now required (newer lower-bound version). Version 0.7.3 ============= *released on 05 November 2015* - Make remotestorage-dependencies actually optional. Version 0.7.2 ============= *released on 05 November 2015* - Un-break testsuite. Version 0.7.1 ============= *released on 05 November 2015* - **Packagers:** The setuptools extras ``keyring`` and ``remotestorage`` have been added. They're basically optional dependencies. See ``setup.py`` for more details. - Highly experimental remoteStorage support has been added. It may be completely overhauled or even removed in any version. - Removed mentions of old ``password_command`` in documentation. Version 0.7.0 ============= *released on 27 October 2015* - **Packagers:** New dependencies are ``click_threading``, ``click_log`` and ``click>=5.0``. - ``password_command`` is gone. 
Keyring support got completely overhauled. See :doc:`keyring`. Version 0.6.0 ============= *released on 06 August 2015* - ``password_command`` invocations with non-zero exit code are now fatal (and will abort synchronization) instead of just producing a warning. - Vdirsyncer is now able to synchronize metadata of collections. Set ``metadata = ["displayname"]`` and run ``vdirsyncer metasync``. - **Packagers:** Don't use the GitHub tarballs, but the PyPI ones. - **Packagers:** ``build.sh`` is gone, and ``Makefile`` is included in tarballs. See the content of ``Makefile`` on how to run tests post-packaging. - ``verify_fingerprint`` doesn't automatically disable ``verify`` anymore. Version 0.5.2 ============= *released on 15 June 2015* - Vdirsyncer now checks and corrects the permissions of status files. - Vdirsyncer is now more robust towards changing UIDs inside items. - Vdirsyncer is now handling unicode hrefs and UIDs correctly. Software that produces non-ASCII UIDs is broken, but apparently it exists. Version 0.5.1 ============= *released on 29 May 2015* - **N.b.: The PyPI upload of 0.5.0 is completely broken.** - Raise version of required requests-toolbelt to ``0.4.0``. - Command line should be a lot faster when no work is done, e.g. for help output. - Fix compatibility with iCloud again. - Use only one worker if debug mode is activated. - ``verify=false`` is now disallowed in vdirsyncer, please use ``verify_fingerprint`` instead. - Fixed a bug where vdirsyncer's DAV storage was not using the configured useragent for collection discovery. Version 0.4.4 ============= *released on 12 March 2015* - Support for client certificates via the new ``auth_cert`` parameter, see :gh:`182` and :ghpr:`183`. - The ``icalendar`` package is no longer required. - Several bugfixes related to collection creation. Version 0.4.3 ============= *released on 20 February 2015* - More performance improvements to ``singlefile``-storage. - Add ``post_hook`` param to ``filesystem``-storage. - Collection creation now also works with SabreDAV-based servers, such as Baikal or ownCloud. - Removed some workarounds for Radicale. Upgrading to the latest Radicale will fix the issues. - Fixed issues with iCloud discovery. - Vdirsyncer now includes a simple ``repair`` command that seeks to fix some broken items. Version 0.4.2 ============= *released on 30 January 2015* - Vdirsyncer now respects redirects when uploading and updating items. This might fix issues with Zimbra. - Relative ``status_path`` values are now interpreted as relative to the configuration file's directory. - Fixed compatibility with custom SabreDAV servers. See :gh:`166`. - Catch harmless threading exceptions that occur when shutting down vdirsyncer. See :gh:`167`. - Vdirsyncer now depends on ``atomicwrites``. - Massive performance improvements to ``singlefile``-storage. - Items with extremely long UIDs should now be saved properly in ``filesystem``-storage. See :gh:`173`. Version 0.4.1 ============= *released on 05 January 2015* - All ``create`` arguments from all storages are gone. Vdirsyncer now asks if it should try to create collections. - The old config values ``True``, ``False``, ``on``, ``off`` and ``None`` are now invalid. - UID conflicts are now properly handled instead of ignoring one item. Card- and CalDAV servers are already supposed to take care of those though. - Official Baikal support added. Version 0.4.0 ============= *released on 31 December 2014* - The ``passwordeval`` parameter has been renamed to ``password_command``. 
- The old way of writing certain config values such as lists is now gone. - Collection discovery has been rewritten. Old configuration files should be compatible with it, but vdirsyncer now caches the results of the collection discovery. You have to run ``vdirsyncer discover`` if collections were added or removed on one side. - Pair and storage names are now restricted to certain characters. Vdirsyncer will issue a clear error message if your configuration file is invalid in that regard. - Vdirsyncer now supports the XDG-Basedir specification. If the ``VDIRSYNCER_CONFIG`` environment variable isn't set and the ``~/.vdirsyncer/config`` file doesn't exist, it will look for the configuration file at ``$XDG_CONFIG_HOME/vdirsyncer/config``. - Some improvements to CardDAV and CalDAV discovery, based on problems found with FastMail. Support for ``.well-known``-URIs has been added. Version 0.3.4 ============= *released on 8 December 2014* - Some more bugfixes to config handling. Version 0.3.3 ============= *released on 8 December 2014* - Vdirsyncer now also works with iCloud. Particularly collection discovery and etag handling were fixed. - Vdirsyncer now encodes Cal- and CardDAV requests differently. This hasn't been well-tested with servers like Zimbra or SoGo, but isn't expected to cause any problems. - Vdirsyncer is now more robust regarding invalid responses from CalDAV servers. This should help with future compatibility with Davmail/Outlook. - Fix a bug when specifying ``item_types`` of :storage:`caldav` in the deprecated config format. - Fix a bug where vdirsyncer would ignore all but one character specified in ``unsafe_href_chars`` of :storage:`caldav` and :storage:`carddav`. Version 0.3.2 ============= *released on 3 December 2014* - The current config format has been deprecated, and support for it will be removed in version 0.4.0. Vdirsyncer warns about this now. Version 0.3.1 ============= *released on 24 November 2014* - Fixed a bug where vdirsyncer would delete items if they're deleted on side A but modified on side B. Instead vdirsyncer will now upload the new items to side A. See :gh:`128`. - Synchronization continues with the remaining pairs if one pair crashes, see :gh:`121`. - The ``processes`` config key is gone. There is now a ``--max-workers`` option on the CLI which has a similar purpose. See :ghpr:`126`. - The Read The Docs-theme is no longer required for building the docs. If it is not installed, the default theme will be used. See :gh:`134`. Version 0.3.0 ============= *released on 20 September 2014* - Add ``verify_fingerprint`` parameter to :storage:`http`, :storage:`caldav` and :storage:`carddav`, see :gh:`99` and :ghpr:`106`. - Add ``passwordeval`` parameter to :ref:`general_config`, see :gh:`108` and :ghpr:`117`. - Emit warnings (instead of exceptions) about certain invalid responses from the server, see :gh:`113`. This is apparently required for compatibility with Davmail. Version 0.2.5 ============= *released on 27 August 2014* - Don't ask for the password of one server more than once and fix multiple concurrency issues, see :gh:`101`. - Better validation of DAV endpoints. Version 0.2.4 ============= *released on 18 August 2014* - Include workaround for collection discovery with latest version of Radicale. - Include metadata files such as the changelog or license in source distribution, see :gh:`97` and :gh:`98`. Version 0.2.3 ============= *released on 11 August 2014* - Vdirsyncer now has a ``--version`` flag, see :gh:`92`. 
- Fix a lot of bugs related to special characters in URLs, see :gh:`49`. Version 0.2.2 ============= *released on 04 August 2014* - Remove a security check that caused problems with special characters in DAV URLs and certain servers. On top of that, the security check was nonsensical. See :gh:`87` and :gh:`91`. - Change some errors to warnings, see :gh:`88`. - Improve collection autodiscovery for servers without full support. Version 0.2.1 ============= *released on 05 July 2014* - Fix bug where vdirsyncer shows empty addressbooks when using CardDAV with Zimbra. - Fix infinite loop when password doesn't exist in system keyring. - Colorized errors, warnings and debug messages. - vdirsyncer now depends on the ``click`` package instead of argvard. Version 0.2.0 ============= *released on 12 June 2014* - vdirsyncer now depends on the ``icalendar`` package from PyPI, to get rid of its own broken parser. - vdirsyncer now also depends on ``requests_toolbelt``. This makes it possible to guess the authentication type instead of blankly assuming ``basic``. - Fix a semi-bug in caldav and carddav storages where a tuple (href, etag) instead of the proper etag would have been returned from the upload method. vdirsyncer might do unnecessary copying when upgrading to this version. - Add the storage :storage:`singlefile`. See :gh:`48`. - The ``collections`` parameter for pair sections now accepts the special values ``from a`` and ``from b`` for automatically discovering collections. See :ref:`pair_config`. - The ``read_only`` parameter was added to storage sections. See :ref:`storage_config`. Version 0.1.5 ============= *released on 14 May 2014* - Introduced changelogs - Many bugfixes - Many doc fixes - vdirsyncer now doesn't necessarily need UIDs anymore for synchronization. - vdirsyncer now aborts if one collection got completely emptied between synchronizations. See :gh:`42`. vdirsyncer-0.18.0/CODE_OF_CONDUCT.rst000066400000000000000000000000631406140636100167770ustar00rootroot00000000000000See `the pimutils CoC `_. vdirsyncer-0.18.0/CONTRIBUTING.rst000066400000000000000000000002051406140636100164270ustar00rootroot00000000000000Please see `the documentation `_ for how to contribute to this project. vdirsyncer-0.18.0/ISSUE_TEMPLATE.md000066400000000000000000000010111406140636100164670ustar00rootroot00000000000000Before you submit bug reports: https://vdirsyncer.pimutils.org/en/stable/contributing.html Things to include in your bugreport: * Your vdirsyncer version * If applicable, which server software (and which version) you're using * Your Python version * Your operating system * Your config file * Use `vdirsyncer -vdebug` for debug output. The output is sensitive, but please attach at least the last few lines before the error (if applicable), censored as necessary. This is almost always the most useful information. vdirsyncer-0.18.0/LICENSE000066400000000000000000000030721406140636100150000ustar00rootroot00000000000000Copyright (c) 2014-2020 by Markus Unterwaditzer & contributors. See AUTHORS.rst for more details. Some rights reserved. Redistribution and use in source and binary forms of the software as well as documentation, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * The names of the contributors may not be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. vdirsyncer-0.18.0/MANIFEST.in000066400000000000000000000004541406140636100155320ustar00rootroot00000000000000# setuptools-scm includes everything tracked by git prune docker prune scripts prune tests/storage/servers prune tests/storage/etesync recursive-include tests/storage/servers/radicale * recursive-include tests/storage/servers/skip * prune docs/_build global-exclude *.py[cdo] __pycache__ *.so *.pyd vdirsyncer-0.18.0/Makefile000066400000000000000000000054251406140636100154370ustar00rootroot00000000000000# See the documentation on how to run the tests: # https://vdirsyncer.pimutils.org/en/stable/contributing.html # Which DAV server to run the tests against (radicale, xandikos, skip, owncloud, nextcloud, ...) export DAV_SERVER := skip # release (install release versions of dependencies) # development (install development versions of some of vdirsyncer's dependencies) # or minimal (install oldest version of each dependency that is supported by vdirsyncer) export REQUIREMENTS := release # Set this to true if you run vdirsyncer's test as part of e.g. packaging. export DETERMINISTIC_TESTS := false # Run the etesync testsuite. export ETESYNC_TESTS := false # Assume to run in CI. Don't use this outside of a virtual machine. It will # heavily "pollute" your system, such as attempting to install a new Python # systemwide. export CI := false # Whether to generate coverage data while running tests. export COVERAGE := $(CI) # Additional arguments that should be passed to py.test. PYTEST_ARGS = # Variables below this line are not very interesting for getting started. 
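# Example invocations (a sketch, assuming a development checkout and a
# working Docker setup as described in the documentation linked above;
# variables given on the command line override the exported defaults):
#
#   make install-dev                        # install vdirsyncer + test deps
#   make test                               # run the testsuite (DAV_SERVER=skip)
#   make test DAV_SERVER=radicale           # run storage tests against Radicale
#   make install-dev REQUIREMENTS=minimal   # oldest supported dependency set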
TEST_EXTRA_PACKAGES = ifeq ($(ETESYNC_TESTS), true) TEST_EXTRA_PACKAGES += git+https://github.com/etesync/journal-manager@v0.5.2 TEST_EXTRA_PACKAGES += django djangorestframework==3.8.2 wsgi_intercept drf-nested-routers endif PYTEST = py.test $(PYTEST_ARGS) CODECOV_PATH = /tmp/codecov.sh all: $(error Take a look at https://vdirsyncer.pimutils.org/en/stable/tutorial.html#installation) ci-test: curl -s https://codecov.io/bash > $(CODECOV_PATH) $(PYTEST) tests/unit/ bash $(CODECOV_PATH) -c -F unit $(PYTEST) tests/system/ bash $(CODECOV_PATH) -c -F system [ "$(ETESYNC_TESTS)" = "false" ] || make test-storage ci-test-storage: curl -s https://codecov.io/bash > $(CODECOV_PATH) set -ex; \ for server in $(DAV_SERVER); do \ DAV_SERVER=$$server $(PYTEST) --cov-append tests/storage; \ done bash $(CODECOV_PATH) -c -F storage test: $(PYTEST) style: pre-commit run --all ! git grep -i syncroniz */* ! git grep -i 'text/icalendar' */* sphinx-build -W -b html ./docs/ ./docs/_build/html/ install-docs: pip install -Ur docs-requirements.txt docs: cd docs && make html sphinx-build -W -b linkcheck ./docs/ ./docs/_build/linkcheck/ release-deb: sh scripts/release-deb.sh debian jessie sh scripts/release-deb.sh debian stretch sh scripts/release-deb.sh ubuntu trusty sh scripts/release-deb.sh ubuntu xenial sh scripts/release-deb.sh ubuntu zesty install-dev: pip install -U pip setuptools wheel pip install -e . pip install -Ur test-requirements.txt $(TEST_EXTRA_PACKAGES) pip install pre-commit [ "$(ETESYNC_TESTS)" = "false" ] || pip install -Ue .[etesync] set -xe && if [ "$(REQUIREMENTS)" = "minimal" ]; then \ pip install -U --force-reinstall $$(python setup.py --quiet minimal_requirements); \ fi .PHONY: docs vdirsyncer-0.18.0/README.rst000066400000000000000000000042451406140636100154650ustar00rootroot00000000000000========== vdirsyncer ========== .. image:: https://builds.sr.ht/~whynothugo/vdirsyncer.svg :target: https://builds.sr.ht/~whynothugo/vdirsyncer :alt: CI status .. image:: https://codecov.io/github/pimutils/vdirsyncer/coverage.svg?branch=master :target: https://codecov.io/github/pimutils/vdirsyncer?branch=master :alt: Codecov coverage report .. image:: https://readthedocs.org/projects/vdirsyncer/badge/ :target: https://vdirsyncer.rtfd.org/ :alt: documentation .. image:: https://img.shields.io/pypi/v/vdirsyncer.svg :target: https://pypi.python.org/pypi/vdirsyncer :alt: version on pypi .. image:: https://img.shields.io/badge/deb-packagecloud.io-844fec.svg :target: https://packagecloud.io/pimutils/vdirsyncer :alt: Debian packages .. image:: https://img.shields.io/pypi/l/vdirsyncer.svg :target: https://github.com/pimutils/vdirsyncer/blob/master/LICENCE :alt: licence: BSD - `Documentation `_ - `Source code `_ Vdirsyncer is a command-line tool for synchronizing calendars and addressbooks between a variety of servers and the local filesystem. The most popular usecase is to synchronize a server with a local folder and use a set of other programs_ to change the local events and contacts. Vdirsyncer can then synchronize those changes back to the server. However, vdirsyncer is not limited to synchronizing between clients and servers. It can also be used to synchronize calendars and/or addressbooks between two servers directly. It aims to be for calendars and contacts what `OfflineIMAP `_ is for emails. .. _programs: https://vdirsyncer.pimutils.org/en/latest/tutorials/ Links of interest ================= * Check out `the tutorial `_ for basic usage. 
* `Contact information `_ * `How to contribute to this project `_ * `Donations `_ License ======= Licensed under the 3-clause BSD license, see ``LICENSE``. vdirsyncer-0.18.0/config.example000066400000000000000000000042031406140636100166120ustar00rootroot00000000000000# An example configuration for vdirsyncer. # # Move it to ~/.vdirsyncer/config or ~/.config/vdirsyncer/config and edit it. # Run `vdirsyncer --help` for CLI usage. # # Optional parameters are commented out. # This file doesn't document all available parameters, see # http://vdirsyncer.pimutils.org/ for the rest of them. [general] # A folder where vdirsyncer can store some metadata about each pair. status_path = "~/.vdirsyncer/status/" # CARDDAV [pair bob_contacts] # A `[pair ]` block defines two storages `a` and `b` that should be # synchronized. The definition of these storages follows in `[storage ]` # blocks. This is similar to accounts in OfflineIMAP. a = "bob_contacts_local" b = "bob_contacts_remote" # Synchronize all collections that can be found. # You need to run `vdirsyncer discover` if new calendars/addressbooks are added # on the server. collections = ["from a", "from b"] # Synchronize the "display name" property into a local file (~/.contacts/displayname). metadata = ["displayname"] # To resolve a conflict the following values are possible: # `null` - abort when collisions occur (default) # `"a wins"` - assume a's items to be more up-to-date # `"b wins"` - assume b's items to be more up-to-date #conflict_resolution = null [storage bob_contacts_local] # A storage references actual data on a remote server or on the local disk. # Similar to repositories in OfflineIMAP. type = "filesystem" path = "~/.contacts/" fileext = ".vcf" [storage bob_contacts_remote] type = "carddav" url = "https://owncloud.example.com/remote.php/carddav/" #username = # The password can also be fetched from the system password storage, netrc or a # custom command. 
See http://vdirsyncer.pimutils.org/en/stable/keyring.html #password = # CALDAV [pair bob_calendar] a = "bob_calendar_local" b = "bob_calendar_remote" collections = ["from a", "from b"] # Calendars also have a color property metadata = ["displayname", "color"] [storage bob_calendar_local] type = "filesystem" path = "~/.calendars/" fileext = ".ics" [storage bob_calendar_remote] type = "caldav" url = "https://owncloud.example.com/remote.php/caldav/" #username = #password = vdirsyncer-0.18.0/contrib/000077500000000000000000000000001406140636100154315ustar00rootroot00000000000000vdirsyncer-0.18.0/contrib/vdirsyncer.plist000066400000000000000000000030571406140636100207030ustar00rootroot00000000000000 EnvironmentVariables LANG @@LOCALE@@ LC_ALL @@LOCALE@@ Label vdirsyncer WorkingDirectory @@WORKINGDIRECTORY@@ ProgramArguments @@VDIRSYNCER@@ -v ERROR sync RunAtLoad StartInterval @@SYNCINTERVALL@@ vdirsyncer-0.18.0/contrib/vdirsyncer.service000066400000000000000000000002721406140636100212040ustar00rootroot00000000000000[Unit] Description=Synchronize calendars and contacts Documentation=https://vdirsyncer.readthedocs.org/ [Service] ExecStart=/usr/bin/vdirsyncer sync RuntimeMaxSec=3m Restart=on-failure vdirsyncer-0.18.0/contrib/vdirsyncer.timer000066400000000000000000000002001406140636100206530ustar00rootroot00000000000000[Unit] Description=Synchronize vdirs [Timer] OnBootSec=5m OnUnitActiveSec=15m AccuracySec=5m [Install] WantedBy=timers.target vdirsyncer-0.18.0/docs-requirements.txt000066400000000000000000000000601406140636100201770ustar00rootroot00000000000000sphinx != 1.4.7 sphinx_rtd_theme setuptools_scm vdirsyncer-0.18.0/docs/000077500000000000000000000000001406140636100147215ustar00rootroot00000000000000vdirsyncer-0.18.0/docs/Makefile000066400000000000000000000151721406140636100163670ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
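# Example usage (a sketch; SPHINXOPTS given on the command line is passed
# through to sphinx-build via ALLSPHINXOPTS):
#
#   make html                 # build the HTML docs into _build/html
#   make html SPHINXOPTS=-W   # same, but turn warnings into errors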
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/vdirsyncer.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/vdirsyncer.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/vdirsyncer" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/vdirsyncer" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." 
$(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." vdirsyncer-0.18.0/docs/_static/000077500000000000000000000000001406140636100163475ustar00rootroot00000000000000vdirsyncer-0.18.0/docs/_static/.gitkeep000066400000000000000000000000001406140636100177660ustar00rootroot00000000000000vdirsyncer-0.18.0/docs/changelog.rst000066400000000000000000000000361406140636100174010ustar00rootroot00000000000000.. include:: ../CHANGELOG.rst vdirsyncer-0.18.0/docs/conf.py000066400000000000000000000051341406140636100162230ustar00rootroot00000000000000import datetime import os from pkg_resources import get_distribution extensions = ["sphinx.ext.autodoc"] templates_path = ["_templates"] source_suffix = ".rst" master_doc = "index" project = "vdirsyncer" copyright = "2014-{}, Markus Unterwaditzer & contributors".format( datetime.date.today().strftime("%Y") ) release = get_distribution("vdirsyncer").version version = ".".join(release.split(".")[:2]) # The short X.Y version. rst_epilog = ".. |vdirsyncer_version| replace:: %s" % release exclude_patterns = ["_build"] pygments_style = "sphinx" on_rtd = os.environ.get("READTHEDOCS", None) == "True" try: import sphinx_rtd_theme html_theme = "sphinx_rtd_theme" html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] except ImportError: html_theme = "default" if not on_rtd: print("-" * 74) print( "Warning: sphinx-rtd-theme not installed, building with default " "theme." 
) print("-" * 74) html_static_path = ["_static"] htmlhelp_basename = "vdirsyncerdoc" latex_elements = {} latex_documents = [ ( "index", "vdirsyncer.tex", "vdirsyncer Documentation", "Markus Unterwaditzer", "manual", ), ] man_pages = [ ("index", "vdirsyncer", "vdirsyncer Documentation", ["Markus Unterwaditzer"], 1) ] texinfo_documents = [ ( "index", "vdirsyncer", "vdirsyncer Documentation", "Markus Unterwaditzer", "vdirsyncer", "Synchronize calendars and contacts.", "Miscellaneous", ), ] def github_issue_role(name, rawtext, text, lineno, inliner, options=None, content=()): options = options or {} try: issue_num = int(text) if issue_num <= 0: raise ValueError() except ValueError: msg = inliner.reporter.error(f"Invalid GitHub issue: {text}", line=lineno) prb = inliner.problematic(rawtext, rawtext, msg) return [prb], [msg] from docutils import nodes PROJECT_HOME = "https://github.com/pimutils/vdirsyncer" link = "{}/{}/{}".format( PROJECT_HOME, "issues" if name == "gh" else "pull", issue_num ) linktext = ("issue #{}" if name == "gh" else "pull request #{}").format(issue_num) node = nodes.reference(rawtext, linktext, refuri=link, **options) return [node], [] def setup(app): from sphinx.domains.python import PyObject app.add_object_type( "storage", "storage", "pair: %s; storage", doc_field_types=PyObject.doc_field_types, ) app.add_role("gh", github_issue_role) app.add_role("ghpr", github_issue_role) vdirsyncer-0.18.0/docs/config.rst000066400000000000000000000442671406140636100167350ustar00rootroot00000000000000========================= Full configuration manual ========================= Vdirsyncer uses an ini-like format for storing its configuration. All values are JSON, invalid JSON will get interpreted as string:: x = "foo" # String x = foo # Shorthand for same string x = 42 # Integer x = ["a", "b", "c"] # List of strings x = true # Boolean x = false x = null # Also known as None .. _general_config: General Section =============== :: [general] status_path = ... - ``status_path``: A directory where vdirsyncer will store some additional data for the next sync. The data is needed to determine whether a new item means it has been added on one side or deleted on the other. Relative paths will be interpreted as relative to the configuration file's directory. See `A simple synchronization algorithm `_ for what exactly is in there. .. _pair_config: Pair Section ============ :: [pair pair_name] a = ... b = ... #collections = null #conflict_resolution = null - Pair names can consist of any alphanumeric characters and the underscore. - ``a`` and ``b`` reference the storages to sync by their names. - ``collections``: A list of collections to synchronize when ``vdirsyncer sync`` is executed. See also :ref:`collections_tutorial`. The special values ``"from a"`` and ``"from b"``, tell vdirsyncer to try autodiscovery on a specific storage. If the collection you want to sync doesn't have the same name on each side, you may also use a value of the form ``["config_name", "name_a", "name_b"]``. This will synchronize the collection ``name_a`` on side A with the collection ``name_b`` on side B. The ``config_name`` will be used for representation in CLI arguments and logging. Examples: - ``collections = ["from b", "foo", "bar"]`` makes vdirsyncer synchronize the collections from side B, and also the collections named "foo" and "bar". - ``collections = ["from b", "from a"]`` makes vdirsyncer synchronize all existing collections on either side. 
- ``collections = [["bar", "bar_a", "bar_b"], "foo"]`` makes vdirsyncer synchronize ``bar_a`` from side A with ``bar_b`` from side B, and also synchronize ``foo`` on both sides with each other. - ``conflict_resolution``: Optional, define how conflicts should be handled. A conflict occurs when one item (event, task) changed on both sides since the last sync. See also :ref:`conflict_resolution_tutorial`. Valid values are: - ``null``, where an error is shown and no changes are done. - ``"a wins"`` and ``"b wins"``, where the whole item is taken from one side. - ``["command", "vimdiff"]``: ``vimdiff `` will be called where ```` and ```` are temporary files that contain the item of each side respectively. The files need to be exactly the same when the command returns. - ``vimdiff`` can be replaced with any other command. For example, in POSIX ``["command", "cp"]`` is equivalent to ``"a wins"``. - Additional list items will be forwarded as arguments. For example, ``["command", "vimdiff", "--noplugin"]`` runs ``vimdiff --noplugin``. Vdirsyncer never attempts to "automatically merge" the two items. .. _partial_sync_def: - ``partial_sync``: Assume A is read-only, B not. If you change items on B, vdirsyncer can't sync the changes to A. What should happen instead? - ``error``: An error is shown. - ``ignore``: The change is ignored. However: Events deleted in B still reappear if they're updated in A. - ``revert`` (default): The change is reverted on next sync. See also :ref:`partial_sync_tutorial`. - ``metadata``: Metadata keys that should be synchronized when ``vdirsyncer metasync`` is executed. Example:: metadata = ["color", "displayname"] This synchronizes the ``color`` and the ``displayname`` properties. The ``conflict_resolution`` parameter applies here as well. .. _storage_config: Storage Section =============== :: [storage storage_name] type = ... - Storage names can consist of any alphanumeric characters and the underscore. - ``type`` defines which kind of storage is defined. See :ref:`storages`. - ``read_only`` defines whether the storage should be regarded as a read-only storage. The value ``true`` means synchronization will discard any changes made to the other side. The value ``false`` implies normal 2-way synchronization. - Any further parameters are passed on to the storage class. .. _storages: Supported Storages ------------------ CalDAV and CardDAV ++++++++++++++++++ .. note:: Please also see :ref:`supported-servers`, as some servers may not work well. .. storage:: caldav CalDAV. :: [storage example_for_caldav] type = "caldav" #start_date = null #end_date = null #item_types = [] url = "..." #username = "" #password = "" #verify = true #auth = null #useragent = "vdirsyncer/0.16.4" #verify_fingerprint = null #auth_cert = null You can set a timerange to synchronize with the parameters ``start_date`` and ``end_date``. Inside those parameters, you can use any Python expression to return a valid :py:class:`datetime.datetime` object. For example, the following would synchronize the timerange from one year in the past to one year in the future:: start_date = "datetime.now() - timedelta(days=365)" end_date = "datetime.now() + timedelta(days=365)" Either both or none have to be specified. The default is to synchronize everything. You can set ``item_types`` to restrict the *kind of items* you want to synchronize. For example, if you want to only synchronize events (but don't download any tasks from the server), set ``item_types = ["VEVENT"]``. 
If you want to synchronize events and tasks, but have some ``VJOURNAL`` items on the server you don't want to synchronize, use ``item_types = ["VEVENT", "VTODO"]``. :param start_date: Start date of timerange to show, default -inf. :param end_date: End date of timerange to show, default +inf. :param item_types: Kind of items to show. The default, the empty list, is to show all. This depends on particular features on the server, the results are not validated. :param url: Base URL or an URL to a calendar. :param username: Username for authentication. :param password: Password for authentication. :param verify: Verify SSL certificate, default True. This can also be a local path to a self-signed SSL certificate. See :ref:`ssl-tutorial` for more information. :param verify_fingerprint: Optional. SHA1 or MD5 fingerprint of the expected server certificate. See :ref:`ssl-tutorial` for more information. :param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The default is preemptive Basic auth, sending credentials even if server didn't request them. This saves from an additional roundtrip per request. Consider setting ``guess`` if this causes issues with your server. :param auth_cert: Optional. Either a path to a certificate with a client certificate and the key or a list of paths to the files with them. :param useragent: Default ``vdirsyncer``. .. storage:: carddav CardDAV. :: [storage example_for_carddav] type = "carddav" url = "..." #username = "" #password = "" #verify = true #auth = null #useragent = "vdirsyncer/0.16.4" #verify_fingerprint = null #auth_cert = null :param url: Base URL or an URL to an addressbook. :param username: Username for authentication. :param password: Password for authentication. :param verify: Verify SSL certificate, default True. This can also be a local path to a self-signed SSL certificate. See :ref:`ssl-tutorial` for more information. :param verify_fingerprint: Optional. SHA1 or MD5 fingerprint of the expected server certificate. See :ref:`ssl-tutorial` for more information. :param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The default is preemptive Basic auth, sending credentials even if server didn't request them. This saves from an additional roundtrip per request. Consider setting ``guess`` if this causes issues with your server. :param auth_cert: Optional. Either a path to a certificate with a client certificate and the key or a list of paths to the files with them. :param useragent: Default ``vdirsyncer``. Google ++++++ Vdirsyncer supports synchronization with Google calendars with the restriction that ``VTODO`` files are rejected by the server. Synchronization with Google contacts is less reliable due to negligence of Google's CardDAV API. **Google's CardDAV implementation is allegedly a disaster in terms of data safety**. See `this blog post `_ for the details. Always back up your data. At first run you will be asked to authorize application for Google account access. To use this storage type, you need to install some additional dependencies:: pip install vdirsyncer[google] Furthermore you need to register vdirsyncer as an application yourself to obtain ``client_id`` and ``client_secret``, as it is against Google's Terms of Service to hardcode those into opensource software [googleterms]_: 1. Go to the `Google API Manager `_ and create a new project under any name. 2. Within that project, enable the "CalDAV" and "CardDAV" APIs (**not** the Calendar and Contacts APIs, those are different and won't work). 
There should be a searchbox where you can just enter those terms. 3. In the sidebar, select "Credentials" and create a new "OAuth Client ID". The application type is "Other". You'll be prompted to create a OAuth consent screen first. Fill out that form however you like. 4. Finally you should have a Client ID and a Client secret. Provide these in your storage config. The ``token_file`` parameter should be a filepath where vdirsyncer can later store authentication-related data. You do not need to create the file itself or write anything to it. .. [googleterms] See `ToS `_, section "Confidential Matters". .. note:: You need to configure which calendars Google should offer vdirsyncer using a rather hidden `settings page `_. .. storage:: google_calendar Google calendar. :: [storage example_for_google_calendar] type = "google_calendar" token_file = "..." client_id = "..." client_secret = "..." #start_date = null #end_date = null #item_types = [] Please refer to :storage:`caldav` regarding the ``item_types`` and timerange parameters. :param token_file: A filepath where access tokens are stored. :param client_id/client_secret: OAuth credentials, obtained from the Google API Manager. .. storage:: google_contacts Google contacts. :: [storage example_for_google_contacts] type = "google_contacts" token_file = "..." client_id = "..." client_secret = "..." :param token_file: A filepath where access tokens are stored. :param client_id/client_secret: OAuth credentials, obtained from the Google API Manager. EteSync +++++++ `EteSync `_ is a new cloud provider for end to end encrypted contacts and calendar storage. Vdirsyncer contains **experimental** support for it. To use it, you need to install some optional dependencies:: pip install vdirsyncer[etesync] On first usage you will be prompted for the service password and the encryption password. Neither are stored. .. storage:: etesync_contacts Contacts for etesync. :: [storage example_for_etesync_contacts] email = ... secrets_dir = ... #server_path = ... #db_path = ... :param email: The email address of your account. :param secrets_dir: A directory where vdirsyncer can store the encryption key and authentication token. :param server_url: Optional. URL to the root of your custom server. :param db_path: Optional. Use a different path for the database. .. storage:: etesync_calendars Calendars for etesync. :: [storage example_for_etesync_calendars] email = ... secrets_dir = ... #server_path = ... #db_path = ... :param email: The email address of your account. :param secrets_dir: A directory where vdirsyncer can store the encryption key and authentication token. :param server_url: Optional. URL to the root of your custom server. :param db_path: Optional. Use a different path for the database. Local +++++ .. storage:: filesystem Saves each item in its own file, given a directory. :: [storage example_for_filesystem] type = "filesystem" path = "..." fileext = "..." #encoding = "utf-8" #post_hook = null #fileignoreext = ".tmp" Can be used with `khal `_. See :doc:`vdir` for a more formal description of the format. Directories with a leading dot are ignored to make usage of e.g. version control easier. :param path: Absolute path to a vdir/collection. If this is used in combination with the ``collections`` parameter in a pair-section, this should point to a directory of vdirs instead. :param fileext: The file extension to use (e.g. ``.txt``). 
Local +++++ .. storage:: filesystem Saves each item in its own file, given a directory. :: [storage example_for_filesystem] type = "filesystem" path = "..." fileext = "..." #encoding = "utf-8" #post_hook = null #fileignoreext = ".tmp" Can be used with `khal `_. See :doc:`vdir` for a more formal description of the format. Directories with a leading dot are ignored to make usage of e.g. version control easier. :param path: Absolute path to a vdir/collection. If this is used in combination with the ``collections`` parameter in a pair-section, this should point to a directory of vdirs instead. :param fileext: The file extension to use (e.g. ``.txt``). The extension is contained in the href, so if you change it after a sync, this will trigger a re-download of everything (but *should* not cause data loss of any kind). To be compatible with the ``vdir`` format you have to use either ``.vcf`` or ``.ics``. Note that metasync won't work if you use an empty string here. :param encoding: File encoding for items, both content and filename. :param post_hook: A command to call for each item creation and modification. The command will be called with the path of the new/updated file. :param fileignoreext: The file extension to ignore. It is only useful if ``fileext`` is set to the empty string. The default is ``.tmp``. .. storage:: singlefile Save data in a single local ``.vcf`` or ``.ics`` file. The storage basically guesses how items should be joined in the file. .. versionadded:: 0.1.6 .. note:: This storage is very slow, and that is unlikely to change. You should consider using :storage:`filesystem` if it fits your usecase. :param path: The filepath to the file to be written to. If collections are used, this should contain ``%s`` as a placeholder for the collection name. :param encoding: Which encoding the file should use. Defaults to UTF-8. Example for syncing with :storage:`caldav`:: [pair my_calendar] a = "my_calendar_local" b = "my_calendar_remote" collections = ["from a", "from b"] [storage my_calendar_local] type = "singlefile" path = "~/.calendars/%s.ics" [storage my_calendar_remote] type = "caldav" url = "https://caldav.example.org/" #username = #password = Example for syncing with :storage:`caldav` using a ``null`` collection:: [pair my_calendar] a = "my_calendar_local" b = "my_calendar_remote" [storage my_calendar_local] type = "singlefile" path = "~/my_calendar.ics" [storage my_calendar_remote] type = "caldav" url = "https://caldav.example.org/username/my_calendar/" #username = #password = Read-only storages ++++++++++++++++++ These storages don't support writing of their items; consequently, ``read_only`` is set to ``true`` by default. Changing ``read_only`` to ``false`` on them leads to an error. .. storage:: http Use a simple ``.ics`` file (or similar) from the web. ``webcal://``-calendars are supposed to be used with this, but you have to replace ``webcal://`` with ``http://``, or better, ``https://``. :: [pair holidays] a = "holidays_local" b = "holidays_remote" collections = null [storage holidays_local] type = "filesystem" path = "~/.config/vdir/calendars/holidays/" fileext = ".ics" [storage holidays_remote] type = "http" url = "https://example.com/holidays_from_hicksville.ics" Too many WebCAL providers generate UIDs of all ``VEVENT``-components on-the-fly, i.e. all UIDs change every time the calendar is downloaded. This leads many synchronization programs to believe that all events have been deleted and new ones created, and accordingly causes a lot of unnecessary uploads and deletions on the other side. Vdirsyncer completely ignores UIDs coming from :storage:`http` and will replace them with a hash of the normalized item content. :param url: URL to the ``.ics`` file. :param username: Username for authentication. :param password: Password for authentication. :param verify: Verify SSL certificate, default True. This can also be a local path to a self-signed SSL certificate. See :ref:`ssl-tutorial` for more information. :param verify_fingerprint: Optional. SHA1 or MD5 fingerprint of the expected server certificate. See :ref:`ssl-tutorial` for more information. :param auth: Optional. Either ``basic``, ``digest`` or ``guess``.
The default is preemptive Basic auth, sending credentials even if the server didn't request them. This saves an additional roundtrip per request. Consider setting ``guess`` if this causes issues with your server. :param auth_cert: Optional. Either a path to a file containing both the client certificate and its key, or a list of the paths to the two files. :param useragent: Default ``vdirsyncer``. vdirsyncer-0.18.0/docs/contact.rst000066400000000000000000000011421406140636100171040ustar00rootroot00000000000000=================== Support and Contact =================== * The ``#pimutils`` `IRC channel on Libera.Chat `_ might be active, depending on your timezone. Use it for support and general (including off-topic) discussion. * Open `a GitHub issue `_ for concrete bug reports and feature requests. * Lastly, you can also `contact the author directly `_. Do this for security issues. If that doesn't work out (i.e. if I don't respond within one week), use ``contact@pimutils.org``. vdirsyncer-0.18.0/docs/contributing.rst000066400000000000000000000107171406140636100201700ustar00rootroot00000000000000============================ Contributing to this project ============================ .. note:: - Please read :doc:`contact` for questions and support requests. - All participants must follow the `pimutils Code of Conduct `_. The issue tracker ================= We use `GitHub issues `_ for organizing bug reports and feature requests. The following `labels `_ are of interest: * "Planning" is for issues that are still undecided, but where at least some discussion exists. * "Blocked" is for issues that can't be worked on at the moment because some other unsolved problem exists. This problem may be a bug in some software dependency, for instance. * "Ready" contains issues that are ready to work on. If you just want to get started with contributing, the "ready" issues are an option. Issues that are still in "Planning" are also an option, but require more upfront thinking and may turn out to be impossible to solve, or at least harder than anticipated. On the flip side, those tend to be the more interesting issues as well, depending on how one looks at it. All of those labels are also available as a kanban board on `waffle.io `_. It is really just an alternative overview over all issues, but might be easier to comprehend. Feel free to :doc:`contact ` me or comment on the relevant issues for further information. Reporting bugs -------------- * Make sure your problem isn't already listed in :doc:`problems`. * Make sure you have the absolutely latest version of vdirsyncer. For users of some Linux distributions such as Debian or Fedora this may not be the version that your distro offers. In those cases please file a bug against the distro package, not against upstream vdirsyncer. * Use ``--verbosity=DEBUG`` when including output from vdirsyncer. Suggesting features ------------------- If you're suggesting a feature, keep in mind that vdirsyncer tries not to be a full calendar or contacts client, but rather just the piece of software that synchronizes all the data. :doc:`Take a look at the documentation for software working with vdirsyncer `. Submitting patches, pull requests ================================= * **Discuss everything in the issue tracker first** (or contact me some other way) before implementing it. * Make sure the tests pass. See below for running them. * But not because you wrote too few tests. * Add yourself to ``AUTHORS.rst``, and add a note to ``CHANGELOG.rst`` too.
Running tests, how to set up your development environment --------------------------------------------------------- For many patches, it might suffice to just let CI run the tests. However, CI is slow, so you might want to run them locally too. For this, set up a virtualenv_ and run this inside of it:: # install: # - vdirsyncer from the repo into the virtualenv # - stylecheckers (flake8) and code formatters (autopep8) make install-dev # Install git commit hook for some extra linting and checking pre-commit install Then you can run:: make test # The normal testsuite make style # Stylechecker make docs # Build the HTML docs, output is at docs/_build/html/ The ``Makefile`` has a lot of options that allow you to control which tests are run, and which servers are tested. Take a look at its code, where they are all initialized and documented. To test against a specific DAV server, use ``DAV_SERVER``:: make DAV_SERVER=xandikos test The server will be initialised in a Docker container and terminated at the end of the test suite. If you have any questions, feel free to open issues about it. Structure of the testsuite -------------------------- Within ``tests/``, there are three main folders: - ``system`` contains system- and also integration tests. A rough rule is: if the test is using temporary files, put it here. - ``unit``, where each testcase tests a single class or function. - ``storage`` runs a generic storage testsuite against all storages. The reason for this separation is: we are planning to generate separate coverage reports for each of those testsuites. Ideally ``unit`` would generate palatable coverage of the entire codebase *on its own*, and the *combination* of ``system`` and ``storage`` as well. .. _virtualenv: http://virtualenv.readthedocs.io/ vdirsyncer-0.18.0/docs/donations.rst000066400000000000000000000013641406140636100174550ustar00rootroot00000000000000========= Donations ========= If you found my work useful, please consider donating. Thank you! - Bitcoin: ``16sSHxZm263WHR9P9PJjCxp64jp9ooXKVt`` - `PayPal.me `_ - `Bountysource `_ is useful for funding work on a specific GitHub issue. - There's also `Bountysource Salt `_, for one-time and recurring donations. - Donations via Bountysource are publicly listed. Use PayPal if you dislike that. - `Flattr `_ or `Gratipay `_ can be used for recurring donations. vdirsyncer-0.18.0/docs/index.rst000066400000000000000000000022211406140636100165570ustar00rootroot00000000000000========== vdirsyncer ========== - `Documentation `_ - `Source code `_ Vdirsyncer is a command-line tool for synchronizing calendars and addressbooks between a variety of servers and the local filesystem. The most popular usecase is to synchronize a server with a local folder and use a set of other :doc:`programs ` to change the local events and contacts. Vdirsyncer can then synchronize those changes back to the server. However, vdirsyncer is not limited to synchronizing between clients and servers. It can also be used to synchronize calendars and/or addressbooks between two servers directly. It aims to be for calendars and contacts what `OfflineIMAP `_ is for emails. .. toctree:: :caption: Users :maxdepth: 1 when installation tutorial ssl-tutorial keyring partial-sync config tutorials/index problems .. toctree:: :caption: Developers :maxdepth: 1 contributing vdir .. 
toctree:: :caption: General :maxdepth: 1 packaging contact changelog license donations vdirsyncer-0.18.0/docs/installation.rst000066400000000000000000000077221406140636100201640ustar00rootroot00000000000000.. _installation: ============ Installation ============ OS/distro packages ------------------ The following packages are user-contributed and were up-to-date at the time of writing: - `ArchLinux `_ - `Ubuntu and Debian, x86_64-only `_ (packages also exist in the official repositories but may be out of date) - `GNU Guix `_ - `OS X (homebrew) `_ - `BSD (pkgsrc) `_ - `OpenBSD `_ We only support the latest version of vdirsyncer, which is at the time of this writing |vdirsyncer_version|. Please **do not file bugs if you use an older version**. Some distributions have multiple release channels. Debian and Fedora for example have a "stable" release channel that ships an older version of vdirsyncer. Those versions aren't supported either. If there is no suitable package for your distribution, you'll need to :ref:`install vdirsyncer manually `. There is an easy command to copy-and-paste for this as well, but you should be aware of its consequences. .. _manual-installation: Manual installation ------------------- If your distribution doesn't provide a package for vdirsyncer, you can still use Python's package manager "pip". First, you'll have to check that the following things are installed: - Python 3.7+ and pip. - ``libxml`` and ``libxslt`` - ``zlib`` - Linux or OS X. **Windows is not supported**, see :gh:`535`. On Linux systems, using the distro's package manager is the best way to do this, for example, using Ubuntu:: sudo apt-get install libxml2 libxslt1.1 zlib1g python Then you have several options. The following text applies to most Python software, by the way. The dirty, easy way ~~~~~~~~~~~~~~~~~~~ The easiest way to install vdirsyncer at this point would be to run:: pip install --user --ignore-installed vdirsyncer - ``--user`` is to install without root rights (into your home directory) - ``--ignore-installed`` is to work around Debian's potentially broken packages (see :ref:`debian-urllib3`). This method has a major flaw though: Pip doesn't keep track of the files it installs. Vdirsyncer's files would be located somewhere in ``~/.local/lib/python*``, but you can't possibly know which packages were installed as dependencies of vdirsyncer and which ones were not, should you decide to uninstall it. In other words, using pip that way would pollute your home directory. The clean, hard way ~~~~~~~~~~~~~~~~~~~ There is a way to install Python software without scattering stuff across your filesystem: virtualenv_. There are a lot of resources on how to use it; the simplest possible way would look something like:: virtualenv ~/vdirsyncer_env ~/vdirsyncer_env/bin/pip install vdirsyncer alias vdirsyncer="~/vdirsyncer_env/bin/vdirsyncer" You'll have to put the last line into your ``.bashrc`` or ``.bash_profile``. This method has two advantages: - It separately installs all Python packages into ``~/vdirsyncer_env/``, without relying on the system packages. This works around OS- or distro-specific issues. - You can delete ``~/vdirsyncer_env/`` to uninstall vdirsyncer entirely. The clean, easy way ~~~~~~~~~~~~~~~~~~~ pipx_ is a new package manager for Python-based software that automatically sets up a virtualenv for each program you install.
Assuming you have it installed on your operating system, you can do:: pipx install vdirsyncer and ``~/.local/pipx/venvs/vdirsyncer`` will be your new vdirsyncer installation. To update vdirsyncer to the latest version:: pipx upgrade vdirsyncer If you're done with vdirsyncer, you can do:: pipx uninstall vdirsyncer and vdirsyncer will be uninstalled, including its dependencies. .. _virtualenv: https://virtualenv.readthedocs.io/ .. _pipx: https://github.com/pipxproject/pipx vdirsyncer-0.18.0/docs/keyring.rst000066400000000000000000000034101406140636100171210ustar00rootroot00000000000000================= Storing passwords ================= .. versionchanged:: 0.7.0 Password configuration got completely overhauled. Vdirsyncer can fetch passwords from several sources other than the config file. Command ======= Say you have the following configuration:: [storage foo] type = "caldav" url = ... username = "foo" password = "bar" But it bugs you that the password is stored in cleartext in the config file. You can do this:: [storage foo] type = "caldav" url = ... username = "foo" password.fetch = ["command", "~/get-password.sh", "more", "args"] You can fetch the username as well:: [storage foo] type = "caldav" url = ... username.fetch = ["command", "~/get-username.sh"] password.fetch = ["command", "~/get-password.sh"] Or really any kind of parameter in a storage section. With pass_ for example, you might find yourself writing something like this in your configuration file:: password.fetch = ["command", "pass", "caldav"] .. _pass: https://www.passwordstore.org/ Accessing the system keyring ---------------------------- As shown above, you can use the ``command`` strategy to fetch your credentials from arbitrary sources. A very common usecase is to fetch your password from the system keyring. The keyring_ Python package contains a command-line utility for fetching passwords from the OS's password store. Installation:: pip install keyring Basic usage:: password.fetch = ["command", "keyring", "get", "example.com", "foouser"] .. _keyring: https://github.com/jaraco/keyring/ Password Prompt =============== You can also simply prompt for the password:: [storage foo] type = "caldav" username = "myusername" password.fetch = ["prompt", "Password for CalDAV"] vdirsyncer-0.18.0/docs/license.rst000066400000000000000000000002031406140636100170700ustar00rootroot00000000000000=================== Credits and License =================== .. include:: ../AUTHORS.rst License ======= .. include:: ../LICENSE vdirsyncer-0.18.0/docs/make.bat000066400000000000000000000150651406140636100163350ustar00rootroot00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . set I18NSPHINXOPTS=%SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. 
latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. xml to make Docutils-native XML files echo. pseudoxml to make pseudoxml-XML files for display purposes echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\vdirsyncer.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\vdirsyncer.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. 
goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. goto end ) :end vdirsyncer-0.18.0/docs/packaging.rst000066400000000000000000000064331406140636100174050ustar00rootroot00000000000000==================== Packaging guidelines ==================== Thank you very much for packaging vdirsyncer! The following guidelines should help you to avoid some common pitfalls. If you find yourself needing to patch anything, or going in a different direction, please open an issue so we can address it in a way that works for everyone. Otherwise we get bug reports for code or scenarios that don't exist in upstream vdirsyncer. Obtaining the source code ========================= The main distribution channel is `PyPI `_, and source tarballs can be obtained there. We mirror the same package tarball and wheel as GitHub releases. Please do not confuse these with the auto-generated GitHub "Source Code" tarball. Those are missing some important metadata and your build will fail. We give each release a tag in the git repo. If you want to get notified of new releases, `GitHub's feed `_ is a good way. Tags will be signed by the maintainer who is doing the release (starting with 0.16.8), and generation of the tarball and wheel is done by CI. Hence, only the tag itself is signed. Dependency versions =================== As with most Python packages, ``setup.py`` denotes the dependencies of vdirsyncer. It also contains lower-bound versions of each dependency. Older versions will be rejected by the testsuite. Testing ======= Everything testing-related goes through the ``Makefile`` in the root of the repository or PyPI package. Trying to e.g. run ``pytest`` directly will require a lot of environment variables to be set (for configuration) and you probably don't want to deal with that.
You can install all the development dependencies with:: make install-dev You probably don't want this since it will use pip to download the dependencies. Alternatively you can find the testing dependencies in ``test-requirements.txt``, again with lower-bound version requirements. You also have to have vdirsyncer fully installed at this point. Merely ``cd``-ing into the tarball will not be sufficient. Running the tests happens with:: make test Hypothesis will randomly generate test input. If you care about deterministic tests, set the ``DETERMINISTIC_TESTS`` variable to ``"true"``:: make DETERMINISTIC_TESTS=true test There are a lot of additional variables that allow you to test vdirsyncer against a particular server. Those variables are not "stable" and may change drastically between minor versions. Just don't use them; you are unlikely to find bugs that vdirsyncer's CI hasn't found. Documentation ============= Using Sphinx_ you can generate the documentation you're reading right now in a variety of formats, such as HTML, PDF, or even a manpage. That said, I only take care of the HTML docs' formatting. You can find a list of dependencies in ``docs-requirements.txt``. Again, you can install those using pip with:: make install-docs Then change into the ``docs/`` directory and build whatever format you want using the ``Makefile`` in there (run ``make`` for the formats you can build). .. _Sphinx: https://www.sphinx-doc.org/
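For example, building just the HTML docs could look like this. This is a sketch that assumes the standard Sphinx ``Makefile`` shipped in ``docs/``::

    make install-docs
    cd docs
    make html
    # The result ends up in docs/_build/html/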
Contrib files ============= Reference ``systemd.service`` and ``systemd.timer`` unit files are provided. It is recommended to install these if your distribution is systemd-based. vdirsyncer-0.18.0/docs/partial-sync.rst000066400000000000000000000045161406140636100200670ustar00rootroot00000000000000.. _partial_sync_tutorial: =============================== Syncing with read-only storages =============================== If you want to subscribe to a public, read-only `WebCAL `_-calendar but neither your server nor your calendar apps support that (or support it insufficiently), vdirsyncer can be used to synchronize such a public calendar ``A`` with a new calendar ``B`` of your own and keep ``B`` updated. Step 1: Create the target calendar ================================== First you need to create the calendar you want to sync the WebCAL-calendar with. Most servers offer a web interface for this. You then need to note the CalDAV URL of your calendar. Note that this URL should directly point to the calendar you just created, which means you would have one such URL for each calendar you have. Step 2: Creating the config =========================== Paste this into your vdirsyncer config:: [pair holidays] a = "holidays_public" b = "holidays_private" collections = null [storage holidays_public] type = "http" # The URL to your iCalendar file. url = "..." [storage holidays_private] type = "caldav" # The direct URL to your calendar. url = "..." # The credentials to your CalDAV server username = "..." password = "..." Then run ``vdirsyncer discover holidays`` and ``vdirsyncer sync holidays``, and your previously created calendar should be filled with events. Step 3: The partial_sync parameter ================================== .. versionadded:: 0.14 You may get into a situation where you want to hide or modify some events from your ``holidays`` calendar. If you try to do that at this point, you'll notice that vdirsyncer will revert any changes you've made after a few times of running ``sync``. This is because vdirsyncer wants to keep everything in sync, and it can't synchronize changes to the public holidays-calendar because it doesn't have the rights to do so. For such purposes you can set the ``partial_sync`` parameter to ``ignore``:: [pair holidays] a = "holidays_public" b = "holidays_private" collections = null partial_sync = "ignore" See :ref:`the config docs ` for more information. .. _nextCloud: https://nextcloud.com/ .. _Baikal: http://sabre.io/baikal/ .. _DAViCal: http://www.davical.org/ vdirsyncer-0.18.0/docs/problems.rst000066400000000000000000000012451406140636100173000ustar00rootroot00000000000000============== Known Problems ============== For any unanswered questions or problems, see :doc:`contact`. .. _debian-urllib3: Requests-related ImportErrors ----------------------------- ImportError: No module named packages.urllib3.poolmanager ImportError: cannot import name iter_field_objects Debian and nowadays even other distros make modifications to the ``requests`` package that don't play well with packages assuming a normal ``requests``. This is due to stubbornness on both sides. See :gh:`82` and :gh:`140` for past discussions. You have one option to work around this: install vdirsyncer in a virtualenv, see :ref:`manual-installation`. vdirsyncer-0.18.0/docs/ssl-tutorial.rst000066400000000000000000000052751406140636100201240ustar00rootroot00000000000000.. _ssl-tutorial: ============================== SSL and certificate validation ============================== All SSL configuration is done per-storage. Pinning by fingerprint ---------------------- To pin the certificate by fingerprint:: [storage foo] type = "caldav" ... verify_fingerprint = "94:FD:7A:CB:50:75:A4:69:82:0A:F8:23:DF:07:FC:69:3E:CD:90:CA" #verify = false # Optional: Disable CA validation, useful for self-signed certs SHA1-, SHA256- or MD5-fingerprints can be used. They're detected by their length. You can use the following command for obtaining a SHA-1 fingerprint:: echo -n | openssl s_client -connect unterwaditzer.net:443 | openssl x509 -noout -fingerprint Note that ``verify_fingerprint`` doesn't suffice for vdirsyncer to work with self-signed certificates (or certificates that are not in your trust store). You most likely need to set ``verify = false`` as well. This disables verification of the SSL certificate's expiration time and the existence of it in your trust store; all that's verified now is the fingerprint. However, please consider using `Let's Encrypt `_ such that you can forget about all of that. It is easier to deploy a free certificate from them than configuring all of your clients to accept the self-signed certificate. .. _ssl-cas: Custom root CAs --------------- To point vdirsyncer to a custom set of root CAs:: [storage foo] type = "caldav" ... verify = "/path/to/cert.pem" Vdirsyncer uses the requests_ library, which, by default, `uses its own set of trusted CAs `_. However, the actual behavior depends on how you have installed it. Many Linux distributions patch their ``python-requests`` package to use the system certificate CAs. Normally these two stores are similar enough for you to not care. But there are cases where certificate validation fails even though you can access the server fine through e.g. your browser. This usually indicates that your installation of the ``requests`` library is somehow broken. In such cases, it makes sense to explicitly set ``verify`` or ``verify_fingerprint`` as shown above. .. _requests: http://www.python-requests.org/
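For a self-signed server, one way to produce such a PEM file is openssl's ``s_client``. The hostname below is a placeholder; the pipe through ``openssl x509`` keeps only the server's own (first) certificate::

    openssl s_client -connect dav.example.com:443 -showcerts </dev/null \
        | openssl x509 -outform PEM > /path/to/cert.pem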
.. _ssl-client-certs: Client Certificates ------------------- Client certificates may be specified with the ``auth_cert`` parameter. If the key and certificate are stored in the same file, it may be a string:: [storage foo] type = "caldav" ... auth_cert = "/path/to/certificate.pem" If the key and certificate are separate, a list may be used:: [storage foo] type = "caldav" ... auth_cert = ["/path/to/certificate.crt", "/path/to/key.key"] vdirsyncer-0.18.0/docs/tutorial.rst000066400000000000000000000247701406140636100173260ustar00rootroot00000000000000======== Tutorial ======== Before starting, :doc:`consider if you actually need vdirsyncer `. There are better alternatives available for particular usecases. Installation ============ See :ref:`installation`. Configuration ============= .. note:: - The `config.example from the repository `_ contains a very terse version of this. - In this example we set up contacts synchronization, but calendar sync works almost the same. Just swap ``type = "carddav"`` for ``type = "caldav"`` and ``fileext = ".vcf"`` for ``fileext = ".ics"``. - Take a look at the :doc:`problems` page if anything doesn't work as planned. By default, vdirsyncer looks for its configuration file in the following locations: - The file pointed to by the ``VDIRSYNCER_CONFIG`` environment variable. - ``~/.vdirsyncer/config``. - ``$XDG_CONFIG_HOME/vdirsyncer/config``, which is normally ``~/.config/vdirsyncer/config``. See the XDG-Basedir_ specification. .. _XDG-Basedir: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html#variables The config file should start with a :ref:`general section `, where the only required parameter is ``status_path``. The following is a minimal example:: [general] status_path = "~/.vdirsyncer/status/" After the general section, any number of *pair and storage sections* may follow. In vdirsyncer, synchronization is always done between two storages. Such storages are defined in :ref:`storage sections `, and which pairs of storages should actually be synchronized is defined in :ref:`pair sections `. This format is copied from OfflineIMAP, where storages are called repositories and pairs are called accounts. The following example synchronizes ownCloud's addressbooks to ``~/.contacts/``:: [pair my_contacts] a = "my_contacts_local" b = "my_contacts_remote" collections = ["from a", "from b"] [storage my_contacts_local] type = "filesystem" path = "~/.contacts/" fileext = ".vcf" [storage my_contacts_remote] type = "carddav" # We can simplify this URL here as well. In theory it shouldn't matter. url = "https://owncloud.example.com/remote.php/carddav/" username = "bob" password = "asdf" .. note:: Configuration for other servers can be found at :ref:`supported-servers`.
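With the configuration in place, the first run consists of discovery followed by a sync, sketched here with the pair name from the example above::

    vdirsyncer discover my_contacts
    vdirsyncer sync my_contacts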
After running ``vdirsyncer discover`` and ``vdirsyncer sync``, ``~/.contacts/`` will contain subfolders for each addressbook, which in turn will contain a bunch of ``.vcf`` files, each containing a single contact in ``VCARD`` format. You can modify their contents, add new ones and delete some [1]_, and your changes will be synchronized to the CardDAV server after you run ``vdirsyncer sync`` again. For further reference, it uses the storages :storage:`filesystem` and :storage:`carddav`. However, if new collections are created on the server, it will not automatically start synchronizing those [2]_. You need to run ``vdirsyncer discover`` again to re-fetch this list instead. .. [1] You'll want to :doc:`use a helper program for this `. .. [2] Because collections are added rarely, and checking for this case before every synchronization isn't worth the overhead. More Configuration ================== .. _conflict_resolution_tutorial: Conflict resolution ------------------- What if the same item is changed on both sides? What should vdirsyncer do? Three options are currently provided: 1. vdirsyncer displays an error message (the default); 2. vdirsyncer chooses one alternative version over the other; 3. vdirsyncer starts a command of your choice that is supposed to merge the two alternative versions. Options 2 and 3 require adding a ``"conflict_resolution"`` parameter to the pair section. Option 2 requires giving either ``"a wins"`` or ``"b wins"`` as value to the parameter:: [pair my_contacts] ... conflict_resolution = "b wins" Earlier we wrote that ``b = "my_contacts_remote"``, so when vdirsyncer encounters the situation where an item changed on both sides, it will simply overwrite the local item with the one from the server. Option 3 requires specifying as value of ``"conflict_resolution"`` an array starting with ``"command"`` and containing paths and arguments to a command. For example:: [pair my_contacts] ... conflict_resolution = ["command", "vimdiff"] In this example, ``vimdiff <a> <b>`` will be called with ``<a>`` and ``<b>`` being two temporary files containing the conflicting items. The files need to be exactly the same when the command returns. More arguments can be passed to the command by adding more elements to the array. See :ref:`pair_config` for the reference documentation. .. _metasync_tutorial: Metadata synchronization ------------------------ Besides items, vdirsyncer can also synchronize metadata like the addressbook's or calendar's "human-friendly" name (internally called "displayname") or the color associated with a calendar. For the purpose of explaining this feature, let's switch to a different base example. This time we'll synchronize calendars:: [pair my_calendars] a = "my_calendars_local" b = "my_calendars_remote" collections = ["from a", "from b"] metadata = ["color"] [storage my_calendars_local] type = "filesystem" path = "~/.calendars/" fileext = ".ics" [storage my_calendars_remote] type = "caldav" url = "https://owncloud.example.com/remote.php/caldav/" username = "bob" password = "asdf" Run ``vdirsyncer discover`` for discovery. Then you can use ``vdirsyncer metasync`` to synchronize the ``color`` property between your local calendars in ``~/.calendars/`` and your ownCloud. Locally the color is just represented as a file called ``color`` within the calendar folder.
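After a ``metasync`` you can inspect that file directly. The collection name below is made up; per :doc:`vdir`, the content is an ASCII hex value of the form ``#RRGGBB``, e.g. ``#FF0000``::

    cat ~/.calendars/mycalendar/color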
.. _collections_tutorial: More information about collections ---------------------------------- "Collection" is a collective term for addressbooks and calendars. Each collection from a storage has a "collection name", a unique identifier for the collection. In the case of the :storage:`filesystem` storage, this is the name of the directory that represents the collection; in the case of the DAV storages, it is the last segment of the URL. We use this identifier in the ``collections`` parameter in the ``pair``-section. This identifier doesn't change even if you rename your calendar in whatever UI you have, because that only changes the so-called "displayname" property [3]_. On some servers (iCloud, Google) this identifier is randomly generated and has no correlation with the displayname you chose. .. [3] Which you can also synchronize with ``metasync`` using ``metadata = ["displayname"]``. There are three collection names that have a special meaning: - ``"from a"``, ``"from b"``: A placeholder for all collections that can be found on side A/B when running ``vdirsyncer discover``. - ``null``: The parameters given to the storage are exact and require no discovery. The last one requires a bit more explanation. Assume this config, which synchronizes two directories of addressbooks:: [pair foobar] a = "foo" b = "bar" collections = ["from a", "from b"] [storage foo] type = "filesystem" fileext = ".vcf" path = "./contacts_foo/" [storage bar] type = "filesystem" fileext = ".vcf" path = "./contacts_bar/" As we saw previously, this will synchronize all collections in ``./contacts_foo/`` with each same-named collection in ``./contacts_bar/``. If there's a collection that exists on one side but not the other, vdirsyncer will ask whether to create that folder on the other side. If we set ``collections = null``, ``./contacts_foo/`` and ``./contacts_bar/`` are no longer treated as folders with collections, but as collections themselves. This means that ``./contacts_foo/`` and ``./contacts_bar/`` will contain ``.vcf``-files, not subfolders that contain ``.vcf``-files. This is useful in situations where listing all collections fails because your DAV-server doesn't support it, for example. In this case, you can set ``url`` of your :storage:`carddav`- or :storage:`caldav`-storage to a URL that points to your CalDAV/CardDAV collection directly. Note that not all storages support the ``null``-collection; for example, :storage:`google_contacts` and :storage:`google_calendar` don't. Advanced collection configuration (server-to-server sync) --------------------------------------------------------- The examples above are good enough if you want to synchronize a remote server to a previously empty disk. However, even more trickery is required when you have two servers with *already existing* collections which you want to synchronize. The core problem in this situation is that vdirsyncer pairs collections by collection name by default (see the definition in the previous section; basically a foldername or a remote UUID). When you have two servers, those collection names may not line up as nicely. Suppose you created two calendars "Test", one on a NextCloud server and one on iCloud, using their respective web interfaces. The URLs look something like this:: NextCloud: https://example.com/remote.php/dav/calendars/user/test/ iCloud: https://p-XX.caldav.icloud.com/YYY/calendars/3b4c9995-5c67-4021-9fa0-be4633623e1c Those are two DAV calendar collections. Their collection names will be ``test`` and ``3b4c9995-5c67-4021-9fa0-be4633623e1c`` respectively, so you don't have a single name you can address them both with. You will need to manually "pair" (no pun intended) those collections up like this:: [pair doublecloud] a = "my_nextcloud" b = "my_icloud" collections = [["mytest", "test", "3b4c9995-5c67-4021-9fa0-be4633623e1c"]] ``mytest`` gives that combination of calendars a nice name you can use when talking about it, so you would use ``vdirsyncer sync doublecloud/mytest`` to say: "Only synchronize these two storages, nothing else that may be configured". .. note:: Why not use displaynames? You may wonder why vdirsyncer just couldn't figure this out by itself. After all, you did name both collections "Test" (which is called "the displayname"), so why not pair collections by that value? There are a few problems with this idea: - Two calendars may have the same exact displayname.
- A calendar may not have a (non-empty) displayname. - The displayname might change. Either you rename the calendar, or the calendar renames itself because you change a language setting. In the end, that property was never designed to be parsed by machines. vdirsyncer-0.18.0/docs/tutorials/000077500000000000000000000000001406140636100167475ustar00rootroot00000000000000vdirsyncer-0.18.0/docs/tutorials/baikal.rst000066400000000000000000000004351406140636100207260ustar00rootroot00000000000000====== Baikal ====== Vdirsyncer is continuously tested against the latest version of Baikal_. - Baikal up to ``0.2.7`` also uses an old version of SabreDAV, with the same issue as ownCloud; see :gh:`160`. This issue is fixed in later versions. .. _Baikal: http://sabre.io/baikal/ vdirsyncer-0.18.0/docs/tutorials/claws-mail.rst000066400000000000000000000047671406140636100215470ustar00rootroot00000000000000.. _claws-mail-tutorial: Vdirsyncer with Claws Mail ========================== First of all, Claws-Mail only supports **read-only** functions for vCards. It can only read contacts, but there's no editor. Preparation ----------- We need to install vdirsyncer; for that, look :doc:`here `. Then we need to create some folders:: mkdir ~/.vdirsyncer mkdir ~/.contacts Configuration ------------- Now we create the configuration for vdirsyncer. Open ``~/.vdirsyncer/config`` with a text editor. The config should look like this: .. code:: ini [general] status_path = "~/.vdirsyncer/status/" [storage local] type = "singlefile" path = "~/.contacts/%s.vcf" [storage online] type = "carddav" url = "CARDDAV_LINK" username = "USERNAME" password = "PASSWORD" read_only = true [pair contacts] a = "local" b = "online" collections = ["from a", "from b"] conflict_resolution = "b wins" - In the general section, we define the status folder path, for discovered collections and generally stuff that needs to persist between syncs. - In the local section we define that all contacts should be synced in a single file, and we set the path for the contacts. - In the online section you must change the URL, username and password to match your setup. We also set the storage to read-only such that no changes get synchronized back. Claws-Mail should not be able to do any changes anyway, but this is one extra safety step in case files get corrupted or vdirsyncer behaves erratically. You can leave that part out if you want to be able to edit those files locally. - In the last section we configure that online contacts win in a conflict situation. Configure this part however you like. A correct value depends on which side is most likely to be up-to-date. Sync ---- Now we discover and sync our contacts:: vdirsyncer discover contacts vdirsyncer sync contacts Claws Mail ---------- Open Claws-Mail. Go to **Tools** => **Addressbook**. Click on **Addressbook** => **New vCard**. Choose a name for the book. Then search for the vCard in the folder **~/.contacts/**. Click OK, and you will see your contacts. .. note:: Claws-Mail shows only contacts that have a mail address. Crontab ------- At the end we create a crontab entry, so that vdirsyncer automatically syncs our contacts every 30 minutes:: crontab -e At the end of that file enter this line:: */30 * * * * /usr/local/bin/vdirsyncer sync > /dev/null And you're done! vdirsyncer-0.18.0/docs/tutorials/davmail.rst000066400000000000000000000035171406140636100211240ustar00rootroot00000000000000.. 
_davmail_setup: =========================== DavMail (Exchange, Outlook) =========================== DavMail_ is a proxy program that allows you to use Card- and CalDAV clients with Outlook. That allows you to use vdirsyncer with Outlook. In practice your success with DavMail may vary wildly. Depending on your Exchange server you might get confronted with weird errors of all sorts (including data loss). **Make absolutely sure you use the latest DavMail**:: [storage outlook] type = "caldav" url = "http://localhost:1080/users/user@example.com/calendar/" username = "user@example.com" password = "..." - Older versions of DavMail handle URLs case-insensitively. See :gh:`144`. - DavMail handles malformed data on the Exchange server very poorly. In such cases the `Calendar Checking Tool for Outlook `_ might help. - In some cases, you may see errors about duplicate events. It may look something like this:: error: my_calendar/calendar: Storage "my_calendar_remote/calendar" contains multiple items with the same UID or even content. Vdirsyncer will now abort the synchronization of this collection, because the fix for this is not clear; It could be the result of a badly behaving server. You can try running: error: error: vdirsyncer repair my_calendar_remote/calendar error: error: But make sure to have a backup of your data in some form. The offending hrefs are: [...] In order to fix this, you can try the Remove-DuplicateAppointments.ps1_ PowerShell script that Microsoft has come up with in order to remove duplicates. .. _DavMail: http://davmail.sourceforge.net/ .. _Remove-DuplicateAppointments.ps1: https://blogs.msdn.microsoft.com/emeamsgdev/2015/02/12/powershell-remove-duplicate-calendar-appointments/ vdirsyncer-0.18.0/docs/tutorials/fastmail.rst000066400000000000000000000011171406140636100213010ustar00rootroot00000000000000======== FastMail ======== Vdirsyncer is continuously tested against FastMail_, thanks to them for providing a free account for this purpose. There are no known issues with it. `FastMail's support pages `_ provide the settings to use:: [storage cal] type = "caldav" url = "https://caldav.fastmail.com/" username = "..." password = "..." [storage card] type = "carddav" url = "https://carddav.fastmail.com/" username = "..." password = "..." .. _FastMail: https://www.fastmail.com/ vdirsyncer-0.18.0/docs/tutorials/google.rst000066400000000000000000000003551406140636100207600ustar00rootroot00000000000000====== Google ====== Using vdirsyncer with Google Calendar is possible as of 0.10, but it is not tested frequently. You can use :storage:`google_contacts` and :storage:`google_calendar`. For more information see :gh:`202` and :gh:`8`. vdirsyncer-0.18.0/docs/tutorials/icloud.rst000066400000000000000000000016411406140636100207620ustar00rootroot00000000000000.. _icloud_setup: ====== iCloud ====== Vdirsyncer is regularly tested against iCloud_. :: [storage cal] type = "caldav" url = "https://caldav.icloud.com/" username = "..." password = "..." [storage card] type = "carddav" url = "https://contacts.icloud.com/" username = "..." password = "..." Problems: - Vdirsyncer can't do two-factor auth with iCloud (there doesn't seem to be a way to do two-factor auth over the DAV APIs). You'll need to use `app-specific passwords `_ instead. - iCloud has a few special requirements when creating collections. In principle vdirsyncer can do it, but it is recommended to create them from an Apple client (or the iCloud web interface). - iCloud requires a minimum length of collection names.
- Calendars created by vdirsyncer cannot be used as tasklists. .. _iCloud: https://www.icloud.com/ vdirsyncer-0.18.0/docs/tutorials/index.rst000066400000000000000000000034701406140636100206140ustar00rootroot00000000000000=============== Other tutorials =============== The following section contains tutorials not explicitly about any particular core function of vdirsyncer. They usually show how to integrate vdirsyncer with third-party software. Because of that, the information about that software may only apply to specific versions of it. .. note:: Please :doc:`contribute ` your own tutorials too! Pages are often only stubs and lack full examples. Client applications =================== .. toctree:: :maxdepth: 1 claws-mail systemd-timer todoman Further applications, with missing pages: - khal_, a CLI calendar application supporting :doc:`vdir `. You can use :storage:`filesystem` with it. - Many graphical calendar apps such as dayplanner_, Orage_ or rainlendar_ save a calendar in a single ``.ics`` file. You can use :storage:`singlefile` with those. - khard_, a commandline addressbook supporting :doc:`vdir `. You can use :storage:`filesystem` with it. - contactquery.c_, a small program explicitly written for querying vdirs from mutt. - mates_, a commandline addressbook supporting :doc:`vdir `. - vdirel_, access :doc:`vdir ` contacts from Emacs. .. _khal: http://lostpackets.de/khal/ .. _dayplanner: http://www.day-planner.org/ .. _Orage: http://www.kolumbus.fi/~w408237/orage/ .. _rainlendar: http://www.rainlendar.net/ .. _khard: https://github.com/scheibler/khard/ .. _contactquery.c: https://github.com/t-8ch/snippets/blob/master/contactquery.c .. _mates: https://github.com/pimutils/mates.rs .. _vdirel: https://github.com/DamienCassou/vdirel .. _supported-servers: Servers ======= .. toctree:: :maxdepth: 1 baikal davmail fastmail google icloud nextcloud owncloud radicale xandikos vdirsyncer-0.18.0/docs/tutorials/nextcloud.rst000066400000000000000000000007561406140636100215160ustar00rootroot00000000000000========= nextCloud ========= Vdirsyncer is continuously tested against the latest version of nextCloud_:: [storage cal] type = "caldav" url = "https://nextcloud.example.com/" username = "..." password = "..." [storage card] type = "carddav" url = "https://nextcloud.example.com/" - WebCAL-subscriptions can't be discovered by vdirsyncer. See `this relevant issue `_. .. _nextCloud: https://nextcloud.com/ vdirsyncer-0.18.0/docs/tutorials/owncloud.rst000066400000000000000000000012641406140636100213360ustar00rootroot00000000000000.. _owncloud_setup: ======== ownCloud ======== Vdirsyncer is continuously tested against the latest version of ownCloud_:: [storage cal] type = "caldav" url = "https://example.com/remote.php/dav/" username = "..." password = "..." [storage card] type = "carddav" url = "https://example.com/remote.php/dav/" username = "..." password = "..." - *Versions older than 7.0.0:* ownCloud uses SabreDAV, which had problems detecting collisions and race-conditions. The problems were reported and are fixed in SabreDAV's repo, and the corresponding fix has been in ownCloud since 7.0.0. See :gh:`16` for more information.
.. _ownCloud: https://owncloud.org/ vdirsyncer-0.18.0/docs/tutorials/radicale.rst000066400000000000000000000021241406140636100212450ustar00rootroot00000000000000======== Radicale ======== Radicale_ is a very lightweight server, however, it intentionally doesn't implement the CalDAV and CardDAV standards completely, which might lead to issues even with very well-written clients. Apart from its non-conformity with standards, there are multiple other problems with its code quality and the way it is maintained. Consider using e.g. :doc:`xandikos` instead. That said, vdirsyncer is continuously tested against the git version and the latest PyPI release of Radicale. - Vdirsyncer can't create collections on Radicale. - Radicale doesn't `support time ranges in the calendar-query of CalDAV `_, so setting ``start_date`` and ``end_date`` for :storage:`caldav` will have no effect or unpredictable consequences. - `Versions of Radicale older than 0.9b1 choke on RFC-conform queries for all items of a collection `_. You have to set ``item_types = ["VTODO", "VEVENT"]`` in :storage:`caldav` for vdirsyncer to work with those versions. .. _Radicale: http://radicale.org/ vdirsyncer-0.18.0/docs/tutorials/systemd-timer.rst000066400000000000000000000025231406140636100223110ustar00rootroot00000000000000.. _systemd_timer-tutorial: Running as a systemd.timer ========================== vdirsyncer includes unit files to run at an interval (by default every 15±5 minutes). .. note:: These are not installed when installing via pip, only via distribution packages. If you installed via pip, or your distribution doesn't ship systemd unit files, you'll need to download vdirsyncer.service_ and vdirsyncer.timer_ into either ``/etc/systemd/user/`` or ``~/.local/share/systemd/user``. .. _vdirsyncer.service: https://raw.githubusercontent.com/pimutils/vdirsyncer/master/contrib/vdirsyncer.service .. _vdirsyncer.timer: https://raw.githubusercontent.com/pimutils/vdirsyncer/master/contrib/vdirsyncer.timer Activation ---------- To activate the timer, just run ``systemctl --user enable vdirsyncer.timer``. To see logs of previous runs, use ``journalctl --user -u vdirsyncer``. Configuration ------------- It's quite possible that the default "every fifteen minutes" interval isn't to your liking. No default will suit everybody, but this is configurable by simply running:: systemctl --user edit vdirsyncer.timer This will open a blank editor, where you can override the timer by including a ``[Timer]`` drop-in (note that systemd does not allow comments on the same line as a value):: [Timer] # How long after boot the first run takes place: OnBootSec=5m # How often subsequent runs take place: OnUnitActiveSec=15m
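Afterwards you can check when the next run is scheduled. This is a sketch using standard systemd tooling::

    systemctl --user list-timers vdirsyncer.timer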
vdirsyncer-0.18.0/docs/tutorials/todoman.rst000066400000000000000000000036551406140636100211510ustar00rootroot00000000000000======= Todoman ======= The iCalendar format also supports saving tasks in the form of ``VTODO``-entries, with the same file extension as normal events: ``.ics``. Many CalDAV servers support synchronizing tasks, and vdirsyncer does too. todoman_ is a CLI task manager supporting :doc:`vdir `. Its interface is similar to the ones of Taskwarrior or the todo.txt CLI app. You can use :storage:`filesystem` with it. .. _todoman: http://todoman.readthedocs.io/ Setting up vdirsyncer ===================== For this tutorial we will use NextCloud. Assuming a config like this:: [general] status_path = "~/.vdirsyncer/status/" [pair calendars] conflict_resolution = "b wins" a = "calendars_local" b = "calendars_dav" collections = ["from b"] metadata = ["color", "displayname"] [storage calendars_local] type = "filesystem" path = "~/.calendars/" fileext = ".ics" [storage calendars_dav] type = "caldav" url = "https://nextcloud.example.net/" username = "..." password = "..." ``vdirsyncer sync`` will then synchronize the calendars of your NextCloud_ instance to subfolders of ``~/.calendars/``. .. _NextCloud: https://nextcloud.com/ Setting up todoman ================== Write this to ``~/.config/todoman/todoman.conf``:: [main] path = ~/.calendars/* The glob_ pattern in ``path`` will match all subfolders in ``~/.calendars/``, which are exactly the tasklists we want. Now you can use ``todoman`` as described in its documentation_ and run ``vdirsyncer sync`` to synchronize the changes to NextCloud. .. _glob: https://en.wikipedia.org/wiki/Glob_(programming) .. _documentation: http://todoman.readthedocs.io/ Other clients ============= The following client applications also synchronize over CalDAV: - The Tasks-app found on iOS - `OpenTasks for Android `_ - The `Tasks `_-app for NextCloud's web UI vdirsyncer-0.18.0/docs/tutorials/xandikos.rst000066400000000000000000000011421406140636100213210ustar00rootroot00000000000000======== Xandikos ======== Xandikos_ is a lightweight, yet complete CalDAV and CardDAV server, backed by git. Vdirsyncer is continuously tested against its latest version. After running ``./bin/xandikos --defaults -d $HOME/dav``, you should be able to point vdirsyncer against the root of Xandikos like this:: [storage cal] type = "caldav" url = "https://xandikos.example.com/" username = "..." password = "..." [storage card] type = "carddav" url = "https://xandikos.example.com/" username = "..." password = "..." .. _Xandikos: https://github.com/jelmer/xandikos vdirsyncer-0.18.0/docs/vdir.rst000066400000000000000000000075541406140636100164300ustar00rootroot00000000000000======================= The Vdir Storage Format ======================= This document describes a standard for storing calendars and contacts on a filesystem, with the main goal of being easy to implement. Vdirsyncer synchronizes to vdirs via :storage:`filesystem`. Each vdir (basically just a directory with some files in it) represents a calendar or addressbook. Basic Structure =============== The main folder (root) contains an arbitrary number of subfolders (collections), which contain only files (items). Synonyms for "collection" may be "addressbook" or "calendar". An item is: - A vCard_ file, in which case the file extension *must* be `.vcf`, *or* - An iCalendar_ file, in which case the file extension *must* be `.ics`. An item *should* contain a ``UID`` property as described by the vCard and iCalendar standards. If it contains more than one ``UID`` property, the values of those *must* not differ. The file *must* contain exactly one event, task or contact. In most cases this also implies only one ``VEVENT``/``VTODO``/``VCARD`` component per file, but e.g. recurrence exceptions would require multiple ``VEVENT`` components per event. The filename should have similar properties to the ``UID`` of the file content. However, there is no requirement for these two to be the same. Programs may choose to store additional metadata in that filename; however, at the same time they *must not* assume that the metadata they included will be preserved by other programs. .. 
.. _vCard: https://tools.ietf.org/html/rfc6350
.. _iCalendar: https://tools.ietf.org/html/rfc5545
.. _CardDAV: http://tools.ietf.org/html/rfc6352
.. _CalDAV: http://tools.ietf.org/search/rfc4791

Metadata
========

Any of the below metadata files may be absent. None of the files listed below
have any file extensions.

- A file called ``color`` inside the vdir indicates the vdir's color, a
  property that is only relevant in UI design. Its content is an
  ASCII-encoded hex-RGB value of the form ``#RRGGBB``. For example, a file
  content of ``#FF0000`` indicates that the vdir has a red (user-visible)
  color. No short forms or informal values such as ``red`` (as known from
  CSS, for example) are allowed. The leading ``#`` must be present.

- A file called ``displayname`` contains a UTF-8 encoded label that may be
  used to represent the vdir in UIs.

Writing to vdirs
================

Creating and modifying items or metadata files *should* happen atomically_.
Writing to a temporary file on the same physical device, and then moving it
to the appropriate location, is usually a very effective solution.

For this purpose, files with the extension ``.tmp`` may be created inside
collections.

When changing an item, the original filename *must* be used.

.. _atomically: https://en.wikipedia.org/wiki/Atomicity_%28programming%29

Reading from vdirs
==================

- Any file ending with ``.tmp``, or any file without a file extension, *must
  not* be treated as an item.

- The ``ident`` part of the filename *should not* be parsed to improve the
  speed of item lookup.

Considerations
==============

The primary reason this format was chosen is its compatibility with the
CardDAV_ and CalDAV_ standards.

Performance
-----------

Currently, vdirs suffer from a rather major performance problem, one which
current implementations try to mitigate by building up indices of the
collections for faster search and lookup.

The reason items' filenames don't contain any extra information is simple:
the solutions that were proposed induced duplication of data, where one
duplicate might become out of date because of bad implementations. As it
stands right now, an index format could be formalized separately though.

vdirsyncer doesn't really have to bother with efficient item lookup, because
its synchronization algorithm needs to fetch the whole list of items anyway.
Detecting changes is easily implemented by checking the files' modification
time.
vdirsyncer-0.18.0/docs/when.rst000066400000000000000000000046071406140636100164230ustar00rootroot00000000000000==========================
When do I need Vdirsyncer?
==========================

Why not Dropbox + todo.txt?
---------------------------

Projects like `todo.txt `_ criticize the complexity of modern productivity
apps, and rightfully so. So they set out to create a new, super-simple,
human-readable format, such that vim suffices for viewing the raw data.

However, when faced with the question of how to synchronize that data across
multiple devices, they seem to have reached a dead end with their novel idea:
"Let's just use Dropbox".

What does file sync software do if both files have changed since the last
sync? The answer is to ignore the question, just sync as often as possible,
and hope for the best. When a sync conflict does occur, most sync services
don't dare to merge the files; they create two copies on each computer
instead. Merging the two task lists is left to the user.
A better idea would've been to use ``git`` to synchronize the ``todo.txt``
file, which is at least able to resolve some basic conflicts.

Why not file sync (Dropbox, git, ...) + vdir?
---------------------------------------------

Since :doc:`vdirs ` are just a bunch of files, an obvious idea is to use
*file synchronization* to keep your data in sync between multiple computers,
such as:

* `Syncthing `_
* `Dropbox `_ or one of the gajillion services like it
* `unison `_
* Just ``git`` with an ``sshd``.

The disadvantages of those solutions largely depend on the exact file sync
program chosen:

* As with ``todo.txt``, Dropbox and friends are obviously agnostic/unaware
  of the files' contents. If a file has changed on both sides, Dropbox just
  copies both versions to both sides. This is a good idea if the user is
  directly interfacing with the file system and is able to resolve conflicts
  themselves. Here it might lead to erroneous behavior with e.g. ``khal``,
  since there are now two events with the same UID.

  This point doesn't apply to git: It has very good merging capabilities,
  better than what vdirsyncer currently has.

* Such a setup doesn't work at all with smartphones. Vdirsyncer, on the
  other hand, synchronizes with CardDAV/CalDAV servers, which can be accessed
  with e.g. DAVx⁵_ or the apps by dmfs_.

.. _DAVx⁵: https://www.davx5.com/
.. _dmfs: https://dmfs.org/
vdirsyncer-0.18.0/scripts/000077500000000000000000000000001406140636100154605ustar00rootroot00000000000000vdirsyncer-0.18.0/scripts/dpkg.Dockerfile000066400000000000000000000023141406140636100203760ustar00rootroot00000000000000ARG distro
ARG distrover

FROM $distro:$distrover

RUN apt-get update
RUN apt-get install -y build-essential fakeroot debhelper git
RUN apt-get install -y python3-all python3-pip python3-venv
RUN apt-get install -y ruby ruby-dev
RUN gem install fpm package_cloud
RUN pip3 install virtualenv virtualenv-tools3
RUN virtualenv -p python3 /vdirsyncer/env/

# See https://github.com/jordansissel/fpm/issues/1106#issuecomment-461678970
RUN pip3 uninstall -y virtualenv
RUN echo 'python3 -m venv "$@"' > /usr/local/bin/virtualenv
RUN chmod +x /usr/local/bin/virtualenv

COPY . /vdirsyncer/vdirsyncer/
WORKDIR /vdirsyncer/vdirsyncer/
RUN mkdir /vdirsyncer/pkgs/

RUN basename *.tar.gz .tar.gz | cut -d'-' -f2 | sed -e 's/\.dev/~/g' | tee version
RUN (echo -n *.tar.gz; echo '[google]') | tee requirements.txt
RUN fpm --verbose \
    --input-type virtualenv \
    --output-type deb \
    --name "vdirsyncer-latest" \
    --version "$(cat version)" \
    --prefix /opt/venvs/vdirsyncer-latest \
    --depends python3 \
    requirements.txt

RUN mv /vdirsyncer/vdirsyncer/*.deb /vdirsyncer/pkgs/
WORKDIR /vdirsyncer/pkgs/
RUN dpkg -i *.deb

# Check that it works:
RUN LC_ALL=C.UTF-8 LANG=C.UTF-8 /opt/venvs/vdirsyncer-latest/bin/vdirsyncer --version
vdirsyncer-0.18.0/scripts/release-deb.sh000066400000000000000000000011301406140636100201700ustar00rootroot00000000000000#!/bin/sh
set -xe

DISTRO=$1
DISTROVER=$2

NAME="vdirsyncer-${DISTRO}-${DISTROVER}:latest"

CONTEXT="$(mktemp -d)"
python setup.py sdist -d "$CONTEXT"

# Build the package in a container with the right distro version.
docker build \
    --build-arg distro=$DISTRO \
    --build-arg distrover=$DISTROVER \
    -t $NAME \
    -f scripts/dpkg.Dockerfile \
    "$CONTEXT"

# Push the package to packagecloud.
# TODO: Use ~/.packagecloud for CI.
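# Assumption, noted here for clarity: PACKAGECLOUD_TOKEN must already be
# exported in the environment running this script; it is forwarded into the
# container below so that package_cloud can authenticate.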
docker run -e PACKAGECLOUD_TOKEN=$PACKAGECLOUD_TOKEN $NAME \ bash -xec "package_cloud push pimutils/vdirsyncer/$DISTRO/$DISTROVER *.deb" rm -rf "$CONTEXT" vdirsyncer-0.18.0/setup.cfg000066400000000000000000000006211406140636100156110ustar00rootroot00000000000000[wheel] universal = 1 [tool:pytest] addopts = --tb=short --cov-config .coveragerc --cov=vdirsyncer --cov-report=term-missing --no-cov-on-fail [flake8] application-import-names = tests,vdirsyncer extend-ignore = E203, # Black-incompatible colon spacing. W503, # Line jump before binary operator. I100, I202 max-line-length = 88 exclude = .eggs,build import-order-style = smarkets vdirsyncer-0.18.0/setup.py000066400000000000000000000047171406140636100155140ustar00rootroot00000000000000""" Vdirsyncer synchronizes calendars and contacts. Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for how to package vdirsyncer. """ from setuptools import Command from setuptools import find_packages from setuptools import setup requirements = [ # https://github.com/mitsuhiko/click/issues/200 "click>=5.0,<9.0", "click-log>=0.3.0, <0.4.0", # https://github.com/pimutils/vdirsyncer/issues/478 "click-threading>=0.5", "requests >=2.20.0", # https://github.com/sigmavirus24/requests-toolbelt/pull/28 # And https://github.com/sigmavirus24/requests-toolbelt/issues/54 "requests_toolbelt >=0.4.0", # https://github.com/untitaker/python-atomicwrites/commit/4d12f23227b6a944ab1d99c507a69fdbc7c9ed6d # noqa "atomicwrites>=0.1.7", ] class PrintRequirements(Command): description = "Prints minimal requirements" user_options = [] def initialize_options(self): pass def finalize_options(self): pass def run(self): for requirement in requirements: print(requirement.replace(">", "=").replace(" ", "")) with open("README.rst") as f: long_description = f.read() setup( # General metadata name="vdirsyncer", author="Markus Unterwaditzer", author_email="markus@unterwaditzer.net", url="https://github.com/pimutils/vdirsyncer", description="Synchronize calendars and contacts", license="BSD", long_description=long_description, # Runtime dependencies install_requires=requirements, # Optional dependencies extras_require={ "google": ["requests-oauthlib"], "etesync": ["etesync==0.5.2", "django<2.0"], }, # Build dependencies setup_requires=["setuptools_scm != 1.12.0"], # Other packages=find_packages(exclude=["tests.*", "tests"]), include_package_data=True, cmdclass={"minimal_requirements": PrintRequirements}, use_scm_version={"write_to": "vdirsyncer/version.py"}, entry_points={"console_scripts": ["vdirsyncer = vdirsyncer.cli:main"]}, classifiers=[ "Development Status :: 4 - Beta", "Environment :: Console", "License :: OSI Approved :: BSD License", "Operating System :: POSIX", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Topic :: Internet", "Topic :: Utilities", ], ) vdirsyncer-0.18.0/test-requirements.txt000066400000000000000000000000761406140636100202350ustar00rootroot00000000000000hypothesis>=5.0.0,<7.0.0 pytest pytest-cov pytest-localserver vdirsyncer-0.18.0/tests/000077500000000000000000000000001406140636100151335ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/__init__.py000066400000000000000000000041301406140636100172420ustar00rootroot00000000000000""" Test suite for vdirsyncer. 
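It also hosts the shared vCard/iCalendar item templates and hypothesis
strategies that the individual test modules build on.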
""" import hypothesis.strategies as st import urllib3.exceptions from vdirsyncer.vobject import normalize_item urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) def blow_up(*a, **kw): raise AssertionError("Did not expect to be called.") def assert_item_equals(a, b): assert normalize_item(a) == normalize_item(b) VCARD_TEMPLATE = """BEGIN:VCARD VERSION:3.0 FN:Cyrus Daboo N:Daboo;Cyrus;;; ADR;TYPE=POSTAL:;2822 Email HQ;Suite 2821;RFCVille;PA;15213;USA EMAIL;TYPE=PREF:cyrus@example.com NICKNAME:me NOTE:Example VCard. ORG:Self Employed TEL;TYPE=VOICE:412 605 0499 TEL;TYPE=FAX:412 605 0705 URL;VALUE=URI:http://www.example.com X-SOMETHING:{r} UID:{uid} END:VCARD""" TASK_TEMPLATE = """BEGIN:VCALENDAR VERSION:2.0 PRODID:-//dmfs.org//mimedir.icalendar//EN BEGIN:VTODO CREATED:20130721T142233Z DTSTAMP:20130730T074543Z LAST-MODIFIED;VALUE=DATE-TIME:20140122T151338Z SEQUENCE:2 SUMMARY:Book: Kowlani - Tödlicher Staub X-SOMETHING:{r} UID:{uid} END:VTODO END:VCALENDAR""" BARE_EVENT_TEMPLATE = """BEGIN:VEVENT DTSTART:19970714T170000Z DTEND:19970715T035959Z SUMMARY:Bastille Day Party X-SOMETHING:{r} UID:{uid} END:VEVENT""" EVENT_TEMPLATE = ( """BEGIN:VCALENDAR VERSION:2.0 PRODID:-//hacksw/handcal//NONSGML v1.0//EN """ + BARE_EVENT_TEMPLATE + """ END:VCALENDAR""" ) EVENT_WITH_TIMEZONE_TEMPLATE = ( """BEGIN:VCALENDAR BEGIN:VTIMEZONE TZID:Europe/Rome X-LIC-LOCATION:Europe/Rome BEGIN:DAYLIGHT TZOFFSETFROM:+0100 TZOFFSETTO:+0200 TZNAME:CEST DTSTART:19700329T020000 RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3 END:DAYLIGHT BEGIN:STANDARD TZOFFSETFROM:+0200 TZOFFSETTO:+0100 TZNAME:CET DTSTART:19701025T030000 RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 END:STANDARD END:VTIMEZONE """ + BARE_EVENT_TEMPLATE + """ END:VCALENDAR""" ) SIMPLE_TEMPLATE = """BEGIN:FOO UID:{uid} X-SOMETHING:{r} HAHA:YES END:FOO""" printable_characters_strategy = st.text( st.characters(blacklist_categories=("Cc", "Cs")) ) uid_strategy = st.text( st.characters(blacklist_categories=("Zs", "Zl", "Zp", "Cc", "Cs")), min_size=1 ).filter(lambda x: x.strip() == x) vdirsyncer-0.18.0/tests/conftest.py000066400000000000000000000021741406140636100173360ustar00rootroot00000000000000""" General-purpose fixtures for vdirsyncer's testsuite. 
""" import logging import os import click_log import pytest from hypothesis import HealthCheck from hypothesis import settings from hypothesis import Verbosity @pytest.fixture(autouse=True) def setup_logging(): click_log.basic_config("vdirsyncer").setLevel(logging.DEBUG) try: import pytest_benchmark except ImportError: @pytest.fixture def benchmark(): return lambda x: x() else: del pytest_benchmark settings.register_profile( "ci", settings( max_examples=1000, verbosity=Verbosity.verbose, suppress_health_check=[HealthCheck.too_slow], ), ) settings.register_profile( "deterministic", settings( derandomize=True, suppress_health_check=HealthCheck.all(), ), ) settings.register_profile("dev", settings(suppress_health_check=[HealthCheck.too_slow])) if os.environ.get("DETERMINISTIC_TESTS", "false").lower() == "true": settings.load_profile("deterministic") elif os.environ.get("CI", "false").lower() == "true": settings.load_profile("ci") else: settings.load_profile("dev") vdirsyncer-0.18.0/tests/storage/000077500000000000000000000000001406140636100165775ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/storage/__init__.py000066400000000000000000000273571406140636100207260ustar00rootroot00000000000000import random import textwrap import uuid from urllib.parse import quote as urlquote from urllib.parse import unquote as urlunquote import pytest from .. import assert_item_equals from .. import EVENT_TEMPLATE from .. import normalize_item from .. import TASK_TEMPLATE from .. import VCARD_TEMPLATE from vdirsyncer import exceptions from vdirsyncer.storage.base import normalize_meta_value from vdirsyncer.vobject import Item def get_server_mixin(server_name): from . import __name__ as base x = __import__(f"{base}.servers.{server_name}", fromlist=[""]) return x.ServerMixin def format_item(item_template, uid=None): # assert that special chars are handled correctly. r = random.random() return Item(item_template.format(r=r, uid=uid or r)) class StorageTests: storage_class = None supports_collections = True supports_metadata = True @pytest.fixture(params=["VEVENT", "VTODO", "VCARD"]) def item_type(self, request): """Parametrize with all supported item types.""" return request.param @pytest.fixture def get_storage_args(self): """ Return a function with the following properties: :param collection: The name of the collection to create and use. 
""" raise NotImplementedError() @pytest.fixture def s(self, get_storage_args): return self.storage_class(**get_storage_args()) @pytest.fixture def get_item(self, item_type): template = { "VEVENT": EVENT_TEMPLATE, "VTODO": TASK_TEMPLATE, "VCARD": VCARD_TEMPLATE, }[item_type] return lambda **kw: format_item(template, **kw) @pytest.fixture def requires_collections(self): if not self.supports_collections: pytest.skip("This storage does not support collections.") @pytest.fixture def requires_metadata(self): if not self.supports_metadata: pytest.skip("This storage does not support metadata.") def test_generic(self, s, get_item): items = [get_item() for i in range(1, 10)] hrefs = [] for item in items: href, etag = s.upload(item) if etag is None: _, etag = s.get(href) hrefs.append((href, etag)) hrefs.sort() assert hrefs == sorted(s.list()) for href, etag in hrefs: assert isinstance(href, (str, bytes)) assert isinstance(etag, (str, bytes)) assert s.has(href) item, etag2 = s.get(href) assert etag == etag2 def test_empty_get_multi(self, s): assert list(s.get_multi([])) == [] def test_get_multi_duplicates(self, s, get_item): href, etag = s.upload(get_item()) if etag is None: _, etag = s.get(href) ((href2, item, etag2),) = s.get_multi([href] * 2) assert href2 == href assert etag2 == etag def test_upload_already_existing(self, s, get_item): item = get_item() s.upload(item) with pytest.raises(exceptions.PreconditionFailed): s.upload(item) def test_upload(self, s, get_item): item = get_item() href, etag = s.upload(item) assert_item_equals(s.get(href)[0], item) def test_update(self, s, get_item): item = get_item() href, etag = s.upload(item) if etag is None: _, etag = s.get(href) assert_item_equals(s.get(href)[0], item) new_item = get_item(uid=item.uid) new_etag = s.update(href, new_item, etag) if new_etag is None: _, new_etag = s.get(href) # See https://github.com/pimutils/vdirsyncer/issues/48 assert isinstance(new_etag, (bytes, str)) assert_item_equals(s.get(href)[0], new_item) def test_update_nonexisting(self, s, get_item): item = get_item() with pytest.raises(exceptions.PreconditionFailed): s.update("huehue", item, '"123"') def test_wrong_etag(self, s, get_item): item = get_item() href, etag = s.upload(item) with pytest.raises(exceptions.PreconditionFailed): s.update(href, item, '"lolnope"') with pytest.raises(exceptions.PreconditionFailed): s.delete(href, '"lolnope"') def test_delete(self, s, get_item): href, etag = s.upload(get_item()) s.delete(href, etag) assert not list(s.list()) def test_delete_nonexisting(self, s, get_item): with pytest.raises(exceptions.PreconditionFailed): s.delete("1", '"123"') def test_list(self, s, get_item): assert not list(s.list()) href, etag = s.upload(get_item()) if etag is None: _, etag = s.get(href) assert list(s.list()) == [(href, etag)] def test_has(self, s, get_item): assert not s.has("asd") href, etag = s.upload(get_item()) assert s.has(href) assert not s.has("asd") s.delete(href, etag) assert not s.has(href) def test_update_others_stay_the_same(self, s, get_item): info = {} for _ in range(4): href, etag = s.upload(get_item()) if etag is None: _, etag = s.get(href) info[href] = etag assert { href: etag for href, item, etag in s.get_multi(href for href, etag in info.items()) } == info def test_repr(self, s, get_storage_args): assert self.storage_class.__name__ in repr(s) assert s.instance_name is None def test_discover(self, requires_collections, get_storage_args, get_item): collections = set() for i in range(1, 5): collection = f"test{i}" s = 
self.storage_class(**get_storage_args(collection=collection))
            assert not list(s.list())
            s.upload(get_item())
            collections.add(s.collection)

        actual = {
            c["collection"]
            for c in self.storage_class.discover(**get_storage_args(collection=None))
        }

        assert actual >= collections

    def test_create_collection(self, requires_collections, get_storage_args, get_item):
        if getattr(self, "dav_server", "") in ("icloud", "fastmail", "davical"):
            pytest.skip("Manual cleanup would be necessary.")
        if getattr(self, "dav_server", "") == "radicale":
            pytest.skip("Radicale does not support collection creation")

        args = get_storage_args(collection=None)
        args["collection"] = "test"

        s = self.storage_class(**self.storage_class.create_collection(**args))

        href = s.upload(get_item())[0]
        assert href in (href for href, etag in s.list())

    def test_discover_collection_arg(self, requires_collections, get_storage_args):
        args = get_storage_args(collection="test2")
        with pytest.raises(TypeError) as excinfo:
            list(self.storage_class.discover(**args))

        assert "collection argument must not be given" in str(excinfo.value)

    def test_collection_arg(self, get_storage_args):
        if self.storage_class.storage_name.startswith("etesync"):
            pytest.skip("etesync uses UUIDs.")

        if self.supports_collections:
            s = self.storage_class(**get_storage_args(collection="test2"))
            # Can't do stronger assertion because of radicale, which needs a
            # file extension to guess the collection type.
            assert "test2" in s.collection
        else:
            with pytest.raises(ValueError):
                self.storage_class(collection="ayy", **get_storage_args())

    def test_case_sensitive_uids(self, s, get_item):
        if s.storage_name == "filesystem":
            pytest.skip("Behavior depends on the filesystem.")

        uid = str(uuid.uuid4())
        s.upload(get_item(uid=uid.upper()))
        s.upload(get_item(uid=uid.lower()))
        items = [href for href, etag in s.list()]
        assert len(items) == 2
        assert len(set(items)) == 2

    def test_specialchars(
        self, monkeypatch, requires_collections, get_storage_args, get_item
    ):
        if getattr(self, "dav_server", "") == "radicale":
            pytest.skip("Radicale is fundamentally broken.")
        if getattr(self, "dav_server", "") in ("icloud", "fastmail"):
            pytest.skip("iCloud and FastMail reject this name.")

        monkeypatch.setattr("vdirsyncer.utils.generate_href", lambda x: x)

        uid = "test @ foo ät bar град сатану"
        collection = "test @ foo ät bar"

        s = self.storage_class(**get_storage_args(collection=collection))

        item = get_item(uid=uid)

        href, etag = s.upload(item)
        item2, etag2 = s.get(href)
        if etag is not None:
            assert etag2 == etag
        assert_item_equals(item2, item)

        ((_, etag3),) = s.list()
        assert etag2 == etag3

        # etesync uses UUIDs for collection names
        if self.storage_class.storage_name.startswith("etesync"):
            return

        assert collection in urlunquote(s.collection)
        if self.storage_class.storage_name.endswith("dav"):
            assert urlquote(uid, "/@:") in href

    def test_metadata(self, requires_metadata, s):
        if not getattr(self, "dav_server", ""):
            assert not s.get_meta("color")
            assert not s.get_meta("displayname")

        try:
            s.set_meta("color", None)
            assert not s.get_meta("color")
            s.set_meta("color", "#ff0000")
            assert s.get_meta("color") == "#ff0000"
        except exceptions.UnsupportedMetadataError:
            pass

        for x in ("hello world", "hello wörld"):
            s.set_meta("displayname", x)
            rv = s.get_meta("displayname")
            assert rv == x
            assert isinstance(rv, str)

    @pytest.mark.parametrize(
        "value",
        [
            None,
            "",
            "Hello there!",
            "Österreich",
            "中国",
            "한글",
            "42a4ec99-b1c2-4859-b142-759112f2ca50",
            "فلسطين",
        ],
    )
    def test_metadata_normalization(self, requires_metadata, s, value):
        x = s.get_meta("displayname")
        assert x == normalize_meta_value(x)

        if not getattr(self, "dav_server", None):
            # ownCloud replaces "" with "unnamed"
            s.set_meta("displayname", value)
            assert s.get_meta("displayname") == normalize_meta_value(value)

    def test_recurring_events(self, s, item_type):
        if item_type != "VEVENT":
            pytest.skip("This storage instance doesn't support iCalendar.")

        uid = str(uuid.uuid4())

        item = Item(
            textwrap.dedent(
                """
            BEGIN:VCALENDAR
            VERSION:2.0
            BEGIN:VEVENT
            DTSTART;TZID=UTC:20140325T084000Z
            DTEND;TZID=UTC:20140325T101000Z
            DTSTAMP:20140327T060506Z
            UID:{uid}
            RECURRENCE-ID;TZID=UTC:20140325T083000Z
            CREATED:20131216T033331Z
            DESCRIPTION:
            LAST-MODIFIED:20140327T060215Z
            LOCATION:
            SEQUENCE:1
            STATUS:CONFIRMED
            SUMMARY:test Event
            TRANSP:OPAQUE
            END:VEVENT
            BEGIN:VEVENT
            DTSTART;TZID=UTC:20140128T083000Z
            DTEND;TZID=UTC:20140128T100000Z
            RRULE:FREQ=WEEKLY;UNTIL=20141208T213000Z;BYDAY=TU
            DTSTAMP:20140327T060506Z
            UID:{uid}
            CREATED:20131216T033331Z
            DESCRIPTION:
            LAST-MODIFIED:20140222T101012Z
            LOCATION:
            SEQUENCE:0
            STATUS:CONFIRMED
            SUMMARY:Test event
            TRANSP:OPAQUE
            END:VEVENT
            END:VCALENDAR
            """.format(
                    uid=uid
                )
            ).strip()
        )

        href, etag = s.upload(item)

        item2, etag2 = s.get(href)
        assert normalize_item(item) == normalize_item(item2)
vdirsyncer-0.18.0/tests/storage/conftest.py000066400000000000000000000051471406140636100210040ustar00rootroot00000000000000import contextlib
import subprocess
import time
import uuid

import pytest
import requests


def wait_for_container(url):
    """Wait for a container to initialise.

    Polls a URL every 100ms until the server responds.
    """
    # give the server 5 seconds to settle
    for _ in range(50):
        print(_)
        try:
            response = requests.get(url)
            response.raise_for_status()
        except requests.ConnectionError:
            pass
        else:
            return

        time.sleep(0.1)

    pytest.exit(
        "Server did not initialise in 5 seconds.\n"
        "WARNING: There may be a stale docker container still running."
    )


@contextlib.contextmanager
def dockerised_server(name, container_port, exposed_port):
    """Run a dockerised DAV server as a context manager."""
    container_id = None
    url = f"http://127.0.0.1:{exposed_port}/"

    try:
        # Hint: This will block while the pull happens, and only return once
        # the container has actually started.
        output = subprocess.check_output(
            [
                "docker",
                "run",
                "--detach",
                "--publish",
                f"{exposed_port}:{container_port}",
                f"whynothugo/vdirsyncer-devkit-{name}",
            ]
        )
        container_id = output.decode().strip()
        wait_for_container(url)

        yield url
    finally:
        if container_id:
            subprocess.check_output(["docker", "kill", container_id])


@pytest.fixture(scope="session")
def baikal_server():
    with dockerised_server("baikal", "80", "8002"):
        yield


@pytest.fixture(scope="session")
def radicale_server():
    with dockerised_server("radicale", "8001", "8001"):
        yield


@pytest.fixture(scope="session")
def xandikos_server():
    with dockerised_server("xandikos", "8000", "8000"):
        yield


@pytest.fixture
def slow_create_collection(request):
    # We need to properly clean up because otherwise we might run into
    # storage limits.
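    # Every collection created through this fixture is therefore recorded in
    # to_delete and removed again by the delete_collections finalizer below.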
to_delete = [] def delete_collections(): for s in to_delete: s.session.request("DELETE", "") request.addfinalizer(delete_collections) def inner(cls, args, collection): assert collection.startswith("test") collection += "-vdirsyncer-ci-" + str(uuid.uuid4()) args = cls.create_collection(collection, **args) s = cls(**args) _clear_collection(s) assert not list(s.list()) to_delete.append(s) return args return inner def _clear_collection(s): for href, etag in s.list(): s.delete(href, etag) vdirsyncer-0.18.0/tests/storage/dav/000077500000000000000000000000001406140636100173515ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/storage/dav/__init__.py000066400000000000000000000030551406140636100214650ustar00rootroot00000000000000import os import uuid import pytest import requests.exceptions from .. import get_server_mixin from .. import StorageTests from tests import assert_item_equals from vdirsyncer import exceptions from vdirsyncer.vobject import Item dav_server = os.environ.get("DAV_SERVER", "skip") ServerMixin = get_server_mixin(dav_server) class DAVStorageTests(ServerMixin, StorageTests): dav_server = dav_server @pytest.mark.skipif(dav_server == "radicale", reason="Radicale is very tolerant.") def test_dav_broken_item(self, s): item = Item("HAHA:YES") with pytest.raises((exceptions.Error, requests.exceptions.HTTPError)): s.upload(item) assert not list(s.list()) def test_dav_empty_get_multi_performance(self, s, monkeypatch): def breakdown(*a, **kw): raise AssertionError("Expected not to be called.") monkeypatch.setattr("requests.sessions.Session.request", breakdown) try: assert list(s.get_multi([])) == [] finally: # Make sure monkeypatch doesn't interfere with DAV server teardown monkeypatch.undo() def test_dav_unicode_href(self, s, get_item, monkeypatch): if self.dav_server == "radicale": pytest.skip("Radicale is unable to deal with unicode hrefs") monkeypatch.setattr(s, "_get_href", lambda item: item.ident + s.fileext) item = get_item(uid="град сатану" + str(uuid.uuid4())) href, etag = s.upload(item) item2, etag2 = s.get(href) assert_item_equals(item, item2) vdirsyncer-0.18.0/tests/storage/dav/test_caldav.py000066400000000000000000000122441406140636100222170ustar00rootroot00000000000000import datetime from textwrap import dedent import pytest import requests.exceptions from . import dav_server from . import DAVStorageTests from .. 
import format_item
from tests import EVENT_TEMPLATE
from tests import TASK_TEMPLATE
from tests import VCARD_TEMPLATE
from vdirsyncer import exceptions
from vdirsyncer.storage.dav import CalDAVStorage


class TestCalDAVStorage(DAVStorageTests):
    storage_class = CalDAVStorage

    @pytest.fixture(params=["VTODO", "VEVENT"])
    def item_type(self, request):
        return request.param

    @pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.")
    def test_doesnt_accept_vcard(self, item_type, get_storage_args):
        s = self.storage_class(item_types=(item_type,), **get_storage_args())

        try:
            s.upload(format_item(VCARD_TEMPLATE))
        except (exceptions.Error, requests.exceptions.HTTPError):
            pass
        assert not list(s.list())

    # The `arg` param is not named `item_types` because that would hit
    # https://bitbucket.org/pytest-dev/pytest/issue/745/
    @pytest.mark.parametrize(
        "arg,calls_num",
        [
            (("VTODO",), 1),
            (("VEVENT",), 1),
            (("VTODO", "VEVENT"), 2),
            (("VTODO", "VEVENT", "VJOURNAL"), 3),
            ((), 1),
        ],
    )
    @pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.")
    def test_item_types_performance(
        self, get_storage_args, arg, calls_num, monkeypatch
    ):
        s = self.storage_class(item_types=arg, **get_storage_args())
        old_parse = s._parse_prop_responses
        calls = []

        def new_parse(*a, **kw):
            calls.append(None)
            return old_parse(*a, **kw)

        monkeypatch.setattr(s, "_parse_prop_responses", new_parse)
        list(s.list())
        assert len(calls) == calls_num

    @pytest.mark.xfail(
        dav_server == "radicale", reason="Radicale doesn't support timeranges."
    )
    def test_timerange_correctness(self, get_storage_args):
        start_date = datetime.datetime(2013, 9, 10)
        end_date = datetime.datetime(2013, 9, 13)
        s = self.storage_class(
            start_date=start_date, end_date=end_date, **get_storage_args()
        )

        too_old_item = format_item(
            dedent(
                """
            BEGIN:VCALENDAR
            VERSION:2.0
            PRODID:-//hacksw/handcal//NONSGML v1.0//EN
            BEGIN:VEVENT
            DTSTART:19970714T170000Z
            DTEND:19970715T035959Z
            SUMMARY:Bastille Day Party
            X-SOMETHING:{r}
            UID:{r}
            END:VEVENT
            END:VCALENDAR
            """
            ).strip()
        )

        too_new_item = format_item(
            dedent(
                """
            BEGIN:VCALENDAR
            VERSION:2.0
            PRODID:-//hacksw/handcal//NONSGML v1.0//EN
            BEGIN:VEVENT
            DTSTART:20150714T170000Z
            DTEND:20150715T035959Z
            SUMMARY:Another Bastille Day Party
            X-SOMETHING:{r}
            UID:{r}
            END:VEVENT
            END:VCALENDAR
            """
            ).strip()
        )

        good_item = format_item(
            dedent(
                """
            BEGIN:VCALENDAR
            VERSION:2.0
            PRODID:-//hacksw/handcal//NONSGML v1.0//EN
            BEGIN:VEVENT
            DTSTART:20130911T170000Z
            DTEND:20130912T035959Z
            SUMMARY:What's with all these Bastille Day Partys
            X-SOMETHING:{r}
            UID:{r}
            END:VEVENT
            END:VCALENDAR
            """
            ).strip()
        )

        s.upload(too_old_item)
        s.upload(too_new_item)
        expected_href, _ = s.upload(good_item)

        ((actual_href, _),) = s.list()
        assert actual_href == expected_href

    def test_invalid_resource(self, monkeypatch, get_storage_args):
        calls = []
        args = get_storage_args(collection=None)

        def request(session, method, url, **kwargs):
            assert url == args["url"]
            calls.append(None)

            r = requests.Response()
            r.status_code = 200
            r._content = b"Hello World."
            return r

        monkeypatch.setattr("requests.sessions.Session.request", request)

        with pytest.raises(ValueError):
            s = self.storage_class(**args)
            list(s.list())
        assert len(calls) == 1

    @pytest.mark.skipif(dav_server == "icloud", reason="iCloud only accepts VEVENT")
    @pytest.mark.skipif(
        dav_server == "fastmail",
        reason="Fastmail has non-standard handling of VTODOs.",
    )
    @pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.")
    def test_item_types_general(self, s):
        event = s.upload(format_item(EVENT_TEMPLATE))[0]
        task = s.upload(format_item(TASK_TEMPLATE))[0]
        s.item_types = ("VTODO", "VEVENT")

        def hrefs():
            return {href for href, etag in s.list()}

        assert hrefs() == {event, task}

        s.item_types = ("VTODO",)
        assert hrefs() == {task}

        s.item_types = ("VEVENT",)
        assert hrefs() == {event}

        s.item_types = ()
        assert hrefs() == {event, task}
vdirsyncer-0.18.0/tests/storage/dav/test_carddav.py000066400000000000000000000004251406140636100223670ustar00rootroot00000000000000import pytest

from . import DAVStorageTests
from vdirsyncer.storage.dav import CardDAVStorage


class TestCardDAVStorage(DAVStorageTests):
    storage_class = CardDAVStorage

    @pytest.fixture(params=["VCARD"])
    def item_type(self, request):
        return request.param
vdirsyncer-0.18.0/tests/storage/dav/test_main.py000066400000000000000000000025271406140636100217130ustar00rootroot00000000000000import pytest

from vdirsyncer.storage.dav import _BAD_XML_CHARS
from vdirsyncer.storage.dav import _merge_xml
from vdirsyncer.storage.dav import _parse_xml


def test_xml_utilities():
    x = _parse_xml(
        b"""<?xml version="1.0" encoding="utf-8"?>
        <D:multistatus xmlns:D="DAV:">
            <D:response>
                <D:propstat>
                    <D:status>HTTP/1.1 404 Not Found</D:status>
                    <D:prop>
                        <D:getcontenttype/>
                    </D:prop>
                </D:propstat>
                <D:propstat>
                    <D:prop>
                        <D:resourcetype>
                            <D:collection/>
                        </D:resourcetype>
                    </D:prop>
                </D:propstat>
            </D:response>
        </D:multistatus>
        """
    )

    response = x.find("{DAV:}response")
    props = _merge_xml(response.findall("{DAV:}propstat/{DAV:}prop"))
    assert props.find("{DAV:}resourcetype/{DAV:}collection") is not None
    assert props.find("{DAV:}getcontenttype") is not None


@pytest.mark.parametrize("char", range(32))
def test_xml_specialchars(char):
    x = _parse_xml(
        '<?xml version="1.0" encoding="ascii"?><foo>'
        "ye{}s\r\n"
        "hello</foo>".format(chr(char)).encode("ascii")
    )

    if char in _BAD_XML_CHARS:
        assert x.text == "yes\nhello"
vdirsyncer-0.18.0/tests/storage/etesync/000077500000000000000000000000001406140636100202515ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/storage/etesync/__init__.py000066400000000000000000000000001406140636100223500ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/storage/etesync/etesync_server/000077500000000000000000000000001406140636100233115ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/storage/etesync/etesync_server/db.sqlite3000066400000000000000000017300001406140636100252050ustar00rootroot00000000000000SQLite format 3
fc5fe7898d334d85f5584164cfa0f09598db13a4226cc4725de65a305cf043ccG fc0bbbc12a99c95aa7b92417ca377bc9abc071170f0a3e0bd6930037cabb25adF fb36f9ef1f33329c891073f68b2113626d9577826379ad54d5d822510a2f875bzG fa31455fd2c1c0d8fb0e7bc48956c12f4ed1662917fb2f5ee8e793209c3ff555G f9c4c662dc6104c7694fa3e590000ca4eea3dfbff3e5a014486742490f11d2eaG f9a771f42dabb21bc7c6033a072849a2c7511dbad8d1bd0f7b72221eea869dd0G f9369970f55ed2bb7ca4c0e6528806a0ea445fbeabed7875ac37fab54d1e3a0bG f803fc2eb9ddf16127c8f15f49e743bebcdf6f8752b0ebe5258c72ab4ae0a3cbF f7a08765abe1d287bb10149519f0bed9c6aca223363244160c0f6ea7e7bc8cb4G f736be7f707bddecebf45bc7f160ea3eaf0eb32e9d90ed4a431716135aaf88a1G f71850e1ee049a9f461471abc3fe474ccb1a09de1338d693213acb67f9670eccG f6c092438245ad2a7efd750efc06ced1dc9f9c10356805e1a255306891bfe181G f424121c3d778825164c5a54441a4b03d0280803f1305c2083493bf2a8a11d23IG f3b9bb42750f8b84e9ad355b100c83e7d06723081e497077cf4fcf508bf93076F f301ecaa3eca291a6241e965ac8035de0e2f48417840a3ea3742feb4d8b0dca58F f21357d6aaf8aea2af0314e6d7953991a6ac1e26fa57117d1fdda5e334ec259dWF f11254e639c3a3b6ceafda303298c3359fdc39f3ecf0ba2c4985da2f7c0493ea,G efc5ce3e99d598be6cc6b139cade4a4106ef0ffc456ea64841188942c7a424daF ef33ed5915ea8c34fa559ab014780bdc9c9a007d0800d13254dd4c2f8d7fd2c34F eeea595200e3b03ef48b39a9026b69119d12c18c6ed54410a8446040103adeee6F ee89d0220b2908407970a3511a59bf65ab8c3830d2199b2aec3292fc73da4f77{G ee88ca367e1b1d8da5ba722b3a67b4a6f99aa778ab0db4a2bdca21a236455b6dG ee5515d32e76e35a99ca8512b47e97046df58c7bdad97a8a337256031b5ac2d7F ed9a2fa480742a9231d2c99eaac2c61f7803644ef96a4caa5e928ca0fe3fbcbc-G ed6603755b54b1e47ee382d20cef239b9ac37f9d2a490368769e4cb72cbad596F ec68a2af6ada90fa736c147a0300158572e96e110af46a42a505af8c273b3a2biG eb46b84c282da78a1d85ac8bbd231bdd7cbd9c34c9450caa32a9da089ed7396fF eb28d8f1eae2b40f9d084a8fbb8b710633d5c0bc65640497cdfee8546a3b08benG e9cb562540d22233451c2395961fa72669bb634278d19454cdf84921ad6bb132&F e88f71ebeecb6a0511eb08ac10f13c42a0a7c0788f9d462b2c816c4284e76890GG e68a7ea9f1f07d12fad89ff3fcf3003398982e7dd02a54207f060dec9f30e419G e6284180f7900d0c92db8a0390349495d6f37eb681b592b0007cff5f1b70a0beG e59c6d45a7b659468be0d9fed9acdacf43341baf9f65634ec76605129e5d273eF e58742025f6814f2fdf7f11da56a39f5046056adc9192d7957ec7d79ad89bed7F e57b3bf674d20cc0e4af56a55ac7b16713414cc0a297c4257b6a15c1ad8a0087QG e4a4817a3863052df0ddcc1b20fbad8cbb33e8b6665f491845063ae0e4b9201cG e492307ad876783a867c7504367ebabe2fe38ae37411a307038646c2b0acf56bF e3a86bc0497398fdd9c29b836ba68c9a211e14476dc3ad75181bc7eb2c0d7c27SF e316044e9a7990937a2e3404382fa8fbf1396e0d4a793fd5a0ba56ada89c7edfTG e1a75f9afd02e5085ed44f2658e4cbb4eae0651263cc67eb2d253f5845c0a1feF e15951b181141f41c8f2c844b7aa0c7216849bde86e989156ee0b61c552f1e75dG e12973c523ca85e1e7a1bda9e7f53a7a8934f6d2e2317722e0a95138820dd666E e0e9c7fa4401932a030b971f266d672ea47666605a904c8918111ca959fed022G e071b9179257c4817aa197ad422e12ee36f90a36f29be9f6e2ca60abe9ded8acF df684901828868b78437760ad8a72aea1631396fef2e225433e7fb330a8ad109kG df2bc66cd99a0dabaef5ee826addc62e5c3eed2e1e1879ea69400de185526942G df28ccd7ef77227c5c16cae66171c438c4acede10c7d0b2d09013712d59de536G de06ff438d374bf99772ccf7c14c80433300129b91b7a8061009d945b3c20152;k :F e071b9179257c4817aa197ad422e12ee36f90a36f29be9f6e2ca60abe9ded8ac9E b7f36824f8a3fd30a2aea5b4644ebc453e9a0e0b2438c8467f269fe4f71ef92bA7F 91165922439a8b2cd02468741913ab8640e55304ad9569b8cd411db4482fd6296E 649b5e5a3ded80f2583800ee64507e8cbb52288f688cc75060052c6fb3bb2834m5F 3993296c1008cc644aa599bd3c31fb5398c901d0652e3c511d086d05d4aed665 8s,Y A n ' T : f Lx2^Dp*V<h"F 
38aa9614b7d5506e450628f367a02cefd1ab86f7fdba64ebcfea27cc2c8dcb5fF 382e30052f6524efa959db507de0c1ccd888e9a8c3f82e6ded5269c95d112465.E 373304895ff9ecf452ff59288231adc1d6e62de4b5d8b6f82e662ebae2c0ffb2+E 36850718f8afee5a85cee04f06547d33702edca01a2ff109d066df20a80fffd5 F 359595ca2eb17fa8c075bb987aeb678b37a95f3d15edbd38d49fc084a0ab1f4eF 355e956238901f8286c90711c0e486983594ffae3c228d39357453b311bb4cc65F 34a71c72e78457b9a0558986c652d80e758061c2624f2c4845a5da1d32ca7dc9GF 3468d3d10271019d2d71f129a758ef86ae625584c2e6da4d7f32cda369eed45cE 3444236b55712de38453de6a4889065953733d403ca937537b1da028b0d40de0(E 33d29259ff06a78bad47b3fde54a35bd017e611e149d38fd2131354664c2268c~F 32b84a0dc6a37429ed3cc84a2942408a0caa0e0fe28c8e7ae1947532c74cce7aE 319162b60728c960105db39d96520d779422dede28d9593ff7ceffaf21cc3f85]F 3135b2d726c22c150a19024bd664acc72c7dc983ae891ef5227d59bd1be84b40E 30867449923bee60a535fba6a30b4952ce3d7d8636fea946690ba55ec1f9c200NE 302463456d8a3233048c315376a3c23c822abbaf03d6cb128bd7b02c9d40bb69LF 2ff3f4969be35eea93138a51218d4b54bd8c53f36ef760e33065353476f7d3d0F 2dabb147d02dc8b51d973b093332a3152424113766f3900ea702f46acbd0ef31)F 2bfcf22cc4a2f7953eb39fcca08ca0f644b4fb9bdbc47adbae6351798ab01f68+E 2bd9c2d9440de2b5c381372416ceac901d5af68ee1b768acfd4b7191ea015dce>E 29767b785e6f896109e091e581f00877bff26324fe43d6e205139a41a851f6e0sF 2904f1d809246e937c62c13c109f5e96eaec7b838bce0fd1816bc1b9f1247e5dE 28d97c90173803ef83cadc3e2ba8d6f4f2500c56e53713051fc431d5c43adb11F 28b326740bca3792bcc83c814d272ef26230ee00399e5915bcf76cd1966181f0F 26279986ef75d5bfa21ac9cac4ede057085c7392a954f3ab2dc328c281e13738E 21e8468798aef41bfbe71fc90637db22ecc789506287f3e551f49161306befcdDF 1cb652241d60e6720b31d947b94947a2f358800b3b567652ac61285b02790c13F 1c2b401a0bed6050819f14101671c4d9199942c22bdafc3160017504c294b5f0E 1bdd160848dd4c54b63997728b6073d587d20fa6eba84a893465c21b833fd1d1qE 1b7fa5879a7b26d7bd4d1aafc97e442bef31ce68769d7a336773ac1105949eb2`F 16d4f4ec895bde138348e3bc733b1613c9372445dc51dca585aeb13f65900a70BF 1642fbfcbb453846983e9659a09628f6ebaa45c9f677dd4d43a9edd9de4b5f1cE 14f02e1a753b21fa97c6310768eeba4fb73b0bce5ad3ae96eb66bbb1b443f3f2?E 14e382eeaa90cc239168886cf2010f3890816fb2b18ef19809b9369898942c3fCF 1452ce5a34b8bcbd19e44614e916e2990d93acfb5c7eb4541c1ca1ad2c3f1c75F 12ef723083ae5dd08ca96084bfe557da50546cb0f0f9bf4c2faf92479cb7dc6aE 1255419f82bd5cd8efecdda06a9a0d750ab7552454f445d028f40820efd06412cF 11e5ec89aaa4aa0f8b39c39a71c2d9ad803d0578d4fed6421a3806658d5444e6E 1030389a6172dc23d8bdf03fe71aa3b3869535ded493541dbdc9f67bb4ca7fdf!F 100de45b359adcfec1898f25ecf636856c8fadb090dd6a5b012778ecf57289f7"E 0df68f8548e284d76e7338804a4d6934cf9e1befa82f74fcaceb436fc266f689E 0d4f84a2b76346897f3cd6c2cf5668e65fc027d08860d28e6746d68086f71134}F 0d43ebef4f6dced888a8851bf142e4278872cdef42830e01075fc14e600940dcF 0bafb77bc8c504c18b0ccc0dbb328aae8aa5f033cad82ead9c5e610220957c31!F 0b775dbacca2d07e555da4a40f3d7fcc84da8b72f98d506416e8aea713e7db64E 0b76e2683fcfaa44d2968ca027dc6a4ccb3b01123198156d125e309797b516cc'aA4?_CE[BYX^Ȉ{nKL ޤa"o&;'I U- \T5y[cj+9L^[;_ʼnEKy}BjVHɮ fЃi8+l8~P%v1`N$ =WpOekIwTZp'Aï@f9JX޹Aݪ}P;WCa.k@#k>dKTXS\.Z1zQeb˜~#Iy Nvi"23 Өz&Miz[(%_Ba Wȯ/RCP>x$ la075163965d356368c96bb7e42c120fc1d2d90708e27b7d2dba538c4e86e437bWNKCea5e<$dr՚dVdYSk@^0s\\R_U/܋ ~MI]  J:$W{DEO0)˭ V#ηtEJ㜏8RH`fcA,6j*ОeNJoe"+9q] ԙYs&>.3s5ݤ؜}>&3'DR>>Os~m^ s[o{ךsg!3WϤG_s#6KB=x# l6c54613cd6114ab73799bde932b97ae41f2e5dc193d8e2f62f213e9860ca79dfb1"Vbuٷ uA9+yM.5| ( 5X pBowi : ېt4jwBcG#69P2ܻؕ W orL.58u2 ~'slZM^,!Eeݧ\aهފ3%+X}Z bqnfDނﮮR?JZlq M?z,!Ա@3PAo8f#YEPј>]b(42GhhL\=7:+=2PDWꐋf@PY#ߙ 
%C+Rđ։rG#;x" lec1b70173223048a9755153af6e0b0afb6a5a18fad1b7be0b0799fd1a25b9a42e0هP5N:tgN TB3eO)/{~x?ൕ>nГsp6gi+U[yC6EZU&GgASao1GJ 4 jnK69=KԚ <N`p5 s&@p]I_񎖄K~9=,k9i7N#m%N☞nVA*ߒ? Qo9[i"Ny2^F b7dfc977da6616b190c07e0c5c1ba1b3a5025c7a8bb9ac6f339285cd0f96bce0F b73cd9814c068a0038eac1ba2c491eb9737b18a513bafbfacee6f543a703442aE b52e16febf7ba901d8d654c72da2c285730f4611f440f86330499e6c98eb93bd_E b48b73e686d9c54dd5208da9f12bb2e57c39fccb022dc75e0297d5d01cba7951VF b471c54f6be8b8a651b186fbe6bd9a8ff48b92333ffbc292ee8057b0c2fda193F b3f938600adbe9d80525fc39c9838f1ceb0d287c09f65e133dfade9a11bfa720F b31e45cd0d0316064b4c869400b644d9e3b2570f818499b47fb8c4b43d3e581dF b2de72248b2a5beccdaf4b1083ecb957a467af7ddc590eb4edfad7ebdd0c3372F b1ef97cc86cf522aae5dc334df7ed7e31af06d90ed4cfe726933378af88e9e85F b189bca3703142eb6444c32774842c71068fd973bccb02fb6fbb6da038b2e2ab;F b082109b417feb1f94043ddca5c66cc1f88bc5240386f6abb587239a1f989a0eE afe96d3650acc0fb4c044866b93cbd1a2d8926df7afb7bf46d7506d34f837721OF afca9e51cb2eea4a4459b21327c6bf83b285df58e47d039cd5e37eada5519465F afa46efdf35a5e0e430c8f4f8b689eca0ab591a4da6804b9932549184442c255F aec4cb9e430485e9b2b777ca06f24f8eb967d6b1c2dbbde7357c034f203174b4 F ae33690101d2980e5c2023d14ba2bf4402129477138e1e69a07bbeb67c47e0eaLF adf88dd9ece9481bf026f5c523bc54779977a4def8416a4b3432078b3b4f3554>F adf3751baaa170cca881777342df600418b81f8d496bb4cb143c5b5ae65cf825F ad7d5380e5d63fba08a04c4effbbe945805ddde72960ff5edac66b4637622094E accb4a8c00d2ef6bd887a6d09ef7d7a2ff7d7e55eaae7e30a4b512898e4238d5E acbf26437b975c04909a3d82b35a452dd2c051bea26a70dc0c118ea74c22df5bE ac1ef50cafd69c4e620591f1fb2df062b6bacb0571c4be58cc756d359e253ef5ME abc219aa3201beee75280fc0bfc9e3a45711cec478e0ff919544eb9c780737d4JF aa227c5ac4f4310bd5ce4fc0166db60a20e59e256434f9b72508e04390788df2F a90a6b44ab15c1f3ca08c8c6ac567faeb6afe4f64a8c2a69b370258d254334aeE a89b6e2543413756c8a3717e1e07d11081d97736f8566fc80ffd67d68c20f1e01E a866e8c2a8067195859241fa8e1671c5aaeb53608d8e4f39116445c6d5373cfdF a8449f785d9b6245d6ab23b5c88f1cff5b6d20a066339d878eea9cfacaa361afF a6c9661b31614c60a2a58f57656fd7c6a09d60f519424e59a2d5cf4ed5cf64f4DF a603c4228cc10d9255ad0067545b5b94ab7ea4dcaa996303f080f610d3e90a7fF a4d0f4e6d58bb4e21d5f338350e9c556e5b42b4045c69678660b8a0750ea79be#E a48c5cc38e8ef3fc7ebdb56dd9f5c51a2a7e4010f376272080d8a52d6b146801EF a43bcf8e286d9b75592ce51c33b6cf5537b1b8c2ea7b45040bec81546ee08a86EF a42682a4de4748f80452dacf3938d4642afcb1a3a22ab73de78cb2758a21653eF a2adbc9685264e448772ee3481d8b3868fbeb534ddf3c9ed6c252676bf606d22F a25ffe70cc4edeebad9ea2842bc7b3d706509446e0e6ba2e02d456d271015623F a070c5bcf2ce534698d2da7dc73608ff7df9d0fd66bb005f042310ee0d9f3daeF a0476d08c4fd40681a3dab6253e5d6c51050db990e92a53f4be6ea8bac0d7a1c4E 9f1fd3d037d12a0927b0cdce7aa01bc14a0fd37d2e320c8262932be449c49c42 E 9ee5e6f2ff5d78a4c978280d0a35ebf6ad7fb12b75707cf8e48ebc02492f7bb9*E 9ecd2726fadf2b61d20e1a70a3e6ce5072be15b3d55a02f1b477b650e5bff475^F 9e2b07ae2aa9472cb2f4b338ee14d37591cfe94801908157f2a69bef5aa69d5fE 9a344c4069e977f37e4e164060a609c7d43d6bdfe613bffe8fa9db97fdc19290:F 9a00114455bd1ade09a2ff97c384236bf655a90a21a6fc57dc5ec41ba5a318b4E 9989778a8c76e2d74b957135530ecc2d1e561f8da8c06152e70425d9c351673dE 998802a87c82863b8d9009160820a85e2d9054a1d0a849c85f3d671a48db1560lF 98fe36f65b60004810221adb678907785c9e80569081a52680512d025ae2bea7F 98edfc06d826f15381f376ad18aeec928dfe348c49f98c66cf7ee437f0a4942eE 98dd373e95d9e798e529e61800ce76c94022540641792355eb4fb5b8d5f76b36jF 988472a0d490ab4b2538dc19404f3877f78e50d9a61ebce80e9fa48336d90190F 
[Binary content: embedded SQLite database test fixture (likely an EteSync journal database, per the ETESYNC_TESTS references elsewhere in this archive). The raw pages are not representable as text; only the embedded schema is recoverable:]

CREATE TABLE "journal_journalmember" ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT, "key" BLOB NOT NULL, "user_id" integer NOT NULL REFERENCES "auth_user" ("id"), "journal_id" integer NOT NULL REFERENCES "journal_journal" ("id"));
CREATE TABLE "journal_userinfo" ("owner_id" integer NOT NULL PRIMARY KEY REFERENCES "auth_user" ("id"), "version" smallint unsigned NOT NULL, "pubkey" BLOB NOT NULL, "content" BLOB NOT NULL);
CREATE INDEX "journal_journalmember_ba73fb5f" ON "journal_journalmember" ("journal_id");
CREATE INDEX "journal_journalmember_e8701ad4" ON "journal_journalmember" ("user_id");
CREATE UNIQUE INDEX "journal_journalmember_user_id_c09c05b0_uniq" ON "journal_journalmember" ("user_id", "journal_id");
L A4f8233053363737a25f88bc9e979a691fb036e0157fbf35d9d658f37219207c6lSApi)>{COne9CC,x9J襜oLwO&oc98[Kt)=GN@?}XS[۾^~~~A`I2w7RRx_]*$-2017-04-07 14:20:57.643239 L A8207814765b1d2e9efedb5826b99bdf12e329c5b83127c0999b7b8b3bee4bc2f:ua]ZuJD[1DF FҢ 1e^f,C9sojbs+E=ף7ȡa-<_'/-Ap\V ፐ(3иlm%]Xb5sݻD89Y22017-04-07 14:20:57.643239 L Ae071b9179257c4817aa197ad422e12ee36f90a36f29be9f6e2ca60abe9ded8ac[zs;hDFܴiut3 ػȕ(G's sKħZ1qOt=GdiV~u7ؼgZ4HkA&a݊2017-04-07 14:20:57.643239 L A8ff63ef18d45d43ade204f4a27603e3033607178ddf542493e1e10226840f5aaj$[|' rbLY<龏aq'izɚ<M" (Av )Ʊˡ7bz;vgBPǭ a8wa?UBHAqԲRg a=]J{QghA6]Er𔔲c2017-04-07 14:20:57.643239 L Ab471c54f6be8b8a651b186fbe6bd9a8ff48b92333ffbc292ee8057b0c2fda193KW}A,Whd:O 1/}])5ySH1=:K=ϛd(\gM#/b_l&i3P(KYbRxfoA ==x QW3ob蟲H'0' {Q2017-04-07 14:20:57.643239 L A1642fbfcbb453846983e9659a09628f6ebaa45c9f677dd4d43a9edd9de4b5f1ctp<:wk"/UgB|sG$_Mqn_^ꨚ][ /P̘KD?ې\Dz0%2017-04-07 14:20:57.643239 y y9 L A58fffc59824c4fdaacf11288f13bf068c36cf5b8ca7775e3a220334434130b47/U0{N|oOU8KytfqRR(EZ;a̍,jщA-ravqycFF땅+B+i"6dZL\EhKDGUAG˽c&pV4*\I֚52017-04-07 14:20:57.6432398 L A6a3f34ebad9526ecc66678bf6bddcddc7e77f47f4b75713220b42fcac687b628( ]rgIfh\pe 0PG4 $(>Ƌk4m؃lU IjX$uǓ?y|uRh eӴl޲t @}|U0&8L/dzׇcOkQ=<2017-04-07 14:20:57.6432397 L Ab7dfc977da6616b190c07e0c5c1ba1b3a5025c7a8bb9ac6f339285cd0f96bce0%KK,f=|»ǵP;n'oOòh^DWASfԵFLwv"0e\BaT0YxrA0:&^ljt=B``5޸(Td #yy^j"ڸX6SY>2017-04-07 14:20:57.6432396 L Ad574d425a4824693a038c999d7fa0b07eb6f39ccb63a5e9691958d4c65bcae43T0p#@O0NTua$8VMos*b҈cް'!Z/:9%#R-łt.#8[7bI1=຤jI uK J k,(Y!rHV2~2017-04-07 14:20:57.6432395 L A12ef723083ae5dd08ca96084bfe557da50546cb0f0f9bf4c2faf92479cb7dc6a'M_ M-&%#izSM 0}^@%A1'ݮcnfg ÿt{ĿDlV1M!C!I){I9΢S-x*qPہ<$ڸ¨RnUyt{ד$3~T`2017-04-07 14:20:57.6432394 L Af6c092438245ad2a7efd750efc06ced1dc9f9c10356805e1a255306891bfe181#S _u*t@bm:ai >su˓4\>ފe(&F.5Zov.~b,މ_@0"B]3-%.t}* LY0ҩΎ:=bºӝ+)f2017-04-07 14:20:57.6432393 L A1c2b401a0bed6050819f14101671c4d9199942c22bdafc3160017504c294b5f0iiB^^[8r+=JxhN?|;޻Exxe"q؉NP1X&<w?+\&881㿄YsY5)`1S57xxޯ,o@oHPD%vU2017-04-07 14:20:57.6432392 L Ae12973c523ca85e1e7a1bda9e7f53a7a8934f6d2e2317722e0a95138820dd666 ;s"s_`!e:֜oͫr}Iϒ&гYH &F$A9*Ź2_k%!z b6Wq}"81#agcQSön@si:C)w Jc+!ˋJ2017-04-07 14:20:57.6432391 L Aff9afc78579c002a657a51a94ffd9f9f745f1efd594f7750eb10d9c3d648b012u}qub4Ҟ3/>7muЍۚOׯ&Hd9XXan~!_۴5[мOf^E4P,a{ب#oa 1Op^[-IgF<M͋0<2017-04-07 14:20:57.6432390 L A65f82c8e12a0e84af8bf5adf1579c332c777ff1db3486ad0795f993efe8ad3fa% M=9yDPusp:}nZԞ?ۧڽ1h3O(֠uE32KY+I4줛dPְ p=ҹM!V0@XQ~ F8`kJJwBs2017-04-07 14:20:57.643239/ L A7b681dd8837cfffa2a8de03354f225e087387cdbb7ce69723a61f65120e2ea39#~^8Y%4:ڣšY~R28']|KR K.a [~rf!R4+ΐ` >K>Ms̮ BZ,*>@PLc;UZPs&̷Y^gj@2017-04-07 14:20:57.643239. L Aeb46b84c282da78a1d85ac8bbd231bdd7cbd9c34c9450caa32a9da089ed7396f~!Tc>l 2CO-a8UnYyy?cYH@Ȼ44x>zN6H ;2-dn{B(Vz2m~`2017-04-07 14:20:57.643239+ L A8a3af3a8b6bb41586489ea21ffa137622f0c936c379b2f918af616336617a27ct^g>/|H ~hwy72hf$Vv׊9G" U)jgUr2~,kH;֏ZԸîQ@GiN#A`es1R0^22017-04-07 14:20:57.643239  }uH , A027ef384a36a64f429182ceb4f59d594c97579c68f02a7a8c54062aa021703bazqdoCt:[n˔孻NaHz--YvKe@<3RHyMd?S£䅗)_`saR5"qo *Z?I޼7YKO&b_t+ 2017-04-07 14:20:57.643239uG , Ad80cd7a9dfc5a8514b3a0e7276fcd0f176cab3a44a2667390b2959914f860c1c) OWk;Ʌ:'Sd jy554V L%4:Q4^Т.)XS"fËfxkꮽ2017-04-07 14:20:57.643239uF , Ab3f938600adbe9d80525fc39c9838f1ceb0d287c09f65e133dfade9a11bfa720@O_&޼/  . 
/c F% / L4XlNdc":!ĥ_A-Ia=&c<\ؾ$kl ^ChS.ES_Lh/%st'Ē " ?5c82017-04-07 14:20:57.643239uE , A4cf0fb3e36d92689aadb5a5ffd075cc30d946f6cb447d792f758757646bfd46c?sg;R,IL΋ĉGJ3maZ==x) \UFb33PܶXz[l$n,g }0G_j {laI9 *"8F}ť Ե+1&l;2017-04-07 14:20:57.643239D L A3f8218504bd85b6f3406dd04937b7d50344c59aa6579c314065c59b0c17ddaff.`Oݱ Vw|*y!hpt䁝|_1ڦ8F3oٻΑ-bP'Q.Ck<^ ,})q4_;2017-04-07 14:20:57.643239? L A8580135763c43ac3f142d731f9ded5d08c40124f250f872287661292b48589d1sQ0Fokd kvX`Ye-4l3ʪJ5!Vcck~<^[PgmXm3@&CtMԍOȂ%0CX,|8'M銉?c*;"$$s A;6Lu2017-04-07 14:20:57.643239> l Afc0bbbc12a99c95aa7b92417ca377bc9abc071170f0a3e0bd6930037cabb25adQބ'eɋS5̜_X`q ٷN Fr~Q@V/ .eu2&Jwi& i:4n0PD+}͢vЈ`E=HSyO4 ß *q|sYх@1K(ݛm42 +I;<ęyDbhrHT{0Kp ъ+{2017-04-07 14:20:57.643239uR , Affd470fb8a4064883565790aa17dce09713fda5b76c5020ae973fc23e440340ej 1`etRcm;881xT ,ǿ96<5.v%IOQ&`:H+q^dh" e.@wM')>a>+lQKcg˷2+Fltr2017-04-07 14:20:57.643239uQ , A8bdde3c013703ff8d3e65638a73732353439abe2815c7dbf2dd5937c863832a8 ]Zg & HYj_7`;LARe q/RM ]*̽U 51_$rM2njYܫz?WϮr[绹 L>'vq1ҖkJKJԗ6yJ mF2017-04-07 14:20:57.643239uP , Af3b9bb42750f8b84e9ad355b100c83e7d06723081e497077cf4fcf508bf93076_SHfui8c5\8j&w.jlZJZXLJP|5([m!>[!L<2017-04-07 14:20:57.643239uM , A5f5e544596eb262fc47075fdcebba1c06b436474607a241436e44ba115551618׾!f(}7':ۯM *dn +wf%Mc%8Md>2`դSt՜UCЊ;6ì=dKzyۆSƬ٤ےmS !].~o}͢M|*2017-04-07 14:20:57.643239uL , A98edfc06d826f15381f376ad18aeec928dfe348c49f98c66cf7ee437f0a4942eܮG#G͸Y0ԏX2 Ij{(]ъT{H:]G.g:4 aYkU{x6^jM@4-jh@٘싱-0= ӼHCDzcFi_rJ2017-04-07 14:20:57.643239uK , Aa8449f785d9b6245d6ab23b5c88f1cff5b6d20a066339d878eea9cfacaa361af0g hΌh>*H0ڱf?aVl沃8W37RBi|^gs(^ؖ]`S\i&쟵!TAcXfPl9 YS 㘝r+v^[2017-04-07 14:20:57.643239uJ , A77d0bb268a532473f58b84232ad6aab954b38aaa121a8f37090f4453a2fb6b94_yQE.B;jfh{9/w#%h,=NоD?{][D,9}*fYI" ;'.-((HS_61sPu,%:u_ܸ+P2017-04-07 14:20:57.643239uI , A1cb652241d60e6720b31d947b94947a2f358800b3b567652ac61285b02790c13، U!n y ȥ0:&?ċYwe箆*JcˈʘZt yc>6up [Sa\~i_C9bbz%룝dsBYD<rdx jni"Pq2017-04-07 14:20:57.643239    g L A8276efed7cf66b3e774c96fcaa0c304e3b810b1a4686e620b601c977e2f9fb8c3M-lt0-6J1imW q'¼[azYv_&r柏!g!ڎk@KxCH@!zOX52017-04-07 14:20:57.643239c L A689e1b5cbe0ba6e30972e6d627c04e68b715288012ca8b09cb308aa340715c4dHR9@uT.]Il'iJnN[xЬ ~[l ΌsFH 0uyBfՊ?;~$8W*|Tc%+{B #y_H@Te:*TO >I2017-04-07 14:20:57.643239b L Ab86f75e6d1bf41ebd982de9b02309e3135b25e3775e1ffd2ee50a3f07855f28b` ÊLQw3H$P9f@'Wt._–vWV4>ak{NGa{uMkV ƘevJv1{#)`M.\)(A@ږ@GƩ\A#׵*/2017-04-07 14:20:57.643239a L Afc74ecffa15989267c295b2ee82c159d3b16f00b956abfb73fd34e2d5a66aba0_[IhB4<-Jhs9ƅ@8#2>b"g5W^0[ptg6R9_d'%+}_O{Ɣ]-_ \,C6"}."zą4%^zBafbD;R2017-04-07 14:20:57.643239` L A658bf080f5c5bd62b201cf10d56f7e243a92ef3d4d32456e55e54e9134fb2d7e F5?xY {җ1Bc1q'J1cm>ީxg;tز߬<81\>4uڬ4;h:)%11J:G۵O7W48>c S"<+"Bza?r!v[ϳN{rSMc5R$׉_m 3D.,?S,3P.D%˺6uۯ){,gKą}y%zr~Om\$7`,2017-04-07 14:20:57.643239^ L A9e2b07ae2aa9472cb2f4b338ee14d37591cfe94801908157f2a69bef5aa69d5fj3+D 02>_|uM]l(Ҳ۱GɠqU+VQNVa|x+zFcemP'E=*xO{{$C5,&A8+k80 +^tn?ƒ0nW2017-04-07 14:20:57.643239u] , Ade06ff438d374bf99772ccf7c14c80433300129b91b7a8061009d945b3c201522νKEtF;0*(o0@' P[Z\VjCJRf+##bOZCTJ(+Q5a:7Jɔtw>@Kڲx/*`Ԕi2017-04-07 14:20:57.643239u\ , A988472a0d490ab4b2538dc19404f3877f78e50d9a61ebce80e9fa48336d90190I)&otMCj Ӵ w;-G`~&J}fӢha,5PB]ѥ3]i,.j3C |kc;b]S{5]}V٨0- k6_0uy( 2017-04-07 14:20:57.643239u[ , Ad2d2f6209c127efc4dfc7ca2dffed808441b46c07e64243e2d3c47e92df1115e>R2[w_25ĩw( p\e F5.MͭMfЉ/:t KGj'.Ms0͐9I#1OpLJm!4nh}<ër1\ГKJΩ3&2017-04-07 14:20:57.643239Z L A359595ca2eb17fa8c075bb987aeb678b37a95f3d15edbd38d49fc084a0ab1f4e V%xLx<|ӐZ&V׎g /9'Ai?s/TAxJ'J>TT6tw([ǛԭKI0.ݙA;FFÁƲ`){I 
A"}{`<U[nqr2sY_2017-04-07 14:20:57.643239uY , Aa603c4228cc10d9255ad0067545b5b94ab7ea4dcaa996303f080f610d3e90a7f@:!#+re#6cү*\./TgX:G\\OVzၔ c*&B'Wm|;K.}a> k)?T ZPg/*?WG*\  2017-04-07 14:20:57.643239 y yv L A6924ebe48d68e5138bf518c57048eacce50eec8b7b81a92c6d7633acf290c772`/'LK\J8g24 xy 4YL|ihaxr`yUm+s;z 4} (yOd 5~.^3+Ed.d= rzxMC;[9*sh]e)2017-04-07 14:20:57.643239u L Acbb76e6690128a43a81bdd2cfbb3b5b261cee0cde148d9fd9797a11e1b89a602iuALtqgCJXUsI?"ЌNLO9oZO_LN"uXxT1ɲ8|p#MWVD\mLak 81Epb8<.Q)Z)'/q2017-04-07 14:20:57.643239t L Abb24ae151fa29074bc38ed3da7d1e247cb8aeb351517e0cf9930d601f85aec720`XVADҢ3즜:T18htu%Pd"~v43c+ S #2Jɩn1̘֛3xG ÷b lqRaLI U,NhJz5L*y`nq2017-04-07 14:20:57.643239s L A6b19769bbe803a8e8c77e9e9b8f8351de2a836aac93477147600a2e4d35e74b6ye@TE(XN7XQXA&Sߛwټ悶;׶dC;+ ;D+W>kOqr.ı& k7ͬXokg0f2017-04-07 14:20:57.643239n L A4fda94e16b68e3ed97f324310802dd6c9ab3c4661a08219492ea06efaac29a37ȋZm11 3%X09.^c4΄{ϜTϑ;2:FxwD?J)lDe*Y3Ss>/UGh0PC(-9G@bo`w?<+|wyQil2017-04-07 14:20:57.643239m L Ac3686c2fb14946ddacc79b4613c97e77f1dcc55e16bb2c098b253ac64bad17895;bV!YK-ƌ[/9]ا+WpZ[nb71W$(ĝ*nBw01F^ܖ˝_ܩH\wq`-/..JvQOk)7_cnc_I]Gڂ2017-04-07 14:20:57.643239l L Adb63a751b5691ee54763919792e7845ce6e0d6d1f6d5b8d53be3e00fa213d44aH:޳ǙЯ?Kv̊$B:re_(K\`U' q hs(r.xDZ]tb=0fFQ_/`KS _7ؐBJe|s:=޸<2e!H;"mry2017-04-07 14:20:57.643239k L Aadf3751baaa170cca881777342df600418b81f8d496bb4cb143c5b5ae65cf825c"рUși6ύ@ 9SUw҃i$R`,ݻ/AcGO |y䖏m WoA槑 'wC(Q i ?{\kЙSD25CVyx@dG\@2017-04-07 14:20:57.643239j L Aa2adbc9685264e448772ee3481d8b3868fbeb534ddf3c9ed6c252676bf606d22Gz G&ngfK&4>-ށ>xi>OUÐî5N,7}8_T 1ucFqU뎽㽽;]R~2!AS-PFz_N7 | ۨΘo5y 깴>)"v2017-04-07 14:20:57.643239i L A8fbaa3e4dd46fbb0b8d642694dd0095005b74de03c53a40b75d2ff93dd5026a0p=]I*I`/EhM}a# 73a%4~I7mw?%Ygj k+?Fb)i8c"8F|2W_Q<`͘b/1#eF ~4ڈ@P})X2017-04-07 14:20:57.643239h L A38aa9614b7d5506e450628f367a02cefd1ab86f7fdba64ebcfea27cc2c8dcb5f%!BtڶgA<8&l]Z|vOꡍZa19_8Md8P\@h5@d/2{$F~$92Mv4ʖB~p܉s ~: d~ {=5ɭR}Wr&2017-04-07 14:20:57.643239 i i l Aee5515d32e76e35a99ca8512b47e97046df58c7bdad97a8a337256031b5ac2d7̨ټ4ȲUƣ@y~pݟ]1p)5b5tO{׆Ҧdj6 rɢeTwhYU71Tw,~J.q7̹i0AZ9[Jqיj%b\x(T3A-/{y2017-04-07 14:20:57.643239 L A664b1b8df9f431b08a4d78b425ac0316d3fe25de51e6013fe2ca2fbba1aca575wߧ^XlM&K_*{Ҕ0U-/ޅLҘǵJ.cKM"cs5A1zfQG$şwG( ͮ6Pc7}CL8xWPrH+D5-w] hSp(>ʅ((N އdGn 筀- j}b$sx~@Ѧ(V'oY'KLj@Dz &GQ'1;v:l9LVT>Wj}<}!O\L?j^$)HI:2017-04-07 14:20:57.643239~ L Ae68a7ea9f1f07d12fad89ff3fcf3003398982e7dd02a54207f060dec9f30e419JȎ7ZmSa$aHԖS%aD} ݩ]PgToȂU7b+y ݎ뼷0S8\+`' jʛ%[)32Y/T=-4T'r\Ab b ~\H6N-" 0%ؙB2ˍ^2017-04-07 14:20:57.643239} L Ad8496d9d33360275f3d63b2c6f9af046cb7a44852994fc29c830c7eafd8202fbxzQ7bŖ=4VVC(ţbEWu8:_%jr Y_ө"[ceV pGT]}Yzd`N!= ^eL' Z; `T!X1r.KC-hYj v?2017-04-07 14:20:57.643239| L A58742fc9851ee611c9fb85710a1aeba8a10035dc5de8f0b35252ced9cc118d4fɻ3Ū?| 1MoYq}qZ۝|I)>Mdt=u1Co\;Doʞ'p#WEP˯WZ7+eMx!QEײ_CJ|t62017-04-07 14:20:57.643239{ L Aafa46efdf35a5e0e430c8f4f8b689eca0ab591a4da6804b9932549184442c255C @FmH;- j"V,A(d~PJL-Jjr)mߜc"VH$UcrURjz@|~U aC+ RȯG2S훺rvY\h|2017-04-07 14:20:57.643239z L Afc5fe7898d334d85f5584164cfa0f09598db13a4226cc4725de65a305cf043ccYIǧq⠒ ίM0xsLq*Lh1Oult' dB>.szcєz>،wB.v6Ow*)n:: H >~mr~}6qLQy,ŸI%XH2017-04-07 14:20:57.643239y L Aaa227c5ac4f4310bd5ce4fc0166db60a20e59e256434f9b72508e04390788df2-f\suH-깰"byVM%# ߣnsb|&/2LGb.jaDΒ00ʉrj?wN'A?mVSd$C!+O5-krNPP+9|G>;b2017-04-07 14:20:57.643239x L Ae4a4817a3863052df0ddcc1b20fbad8cbb33e8b6665f491845063ae0e4b9201c}%='I"KCc7hBAC1@U+3ӑNjOY A2017-04-07 14:20:57.643239u , A2ff3f4969be35eea93138a51218d4b54bd8c53f36ef760e33065353476f7d3d0T'>hG8U;5X|6#06>j(/]ˉ@f\mH1}&o}r`3R] 
kPtE|*QؾNMY*Ω*x<qzG*w碉myI[p.@iVß2017-04-07 14:20:57.643239u , Af9c4c662dc6104c7694fa3e590000ca4eea3dfbff3e5a014486742490f11d2ea_t2Z-*n 7   ju\.XVStOވry-CDH}לGCW?t>|oGۄF`M*}y@(WPiVe6N+[f4ic9_& 2017-04-07 14:20:57.643239u , A5b8a674123d1fcb339c1be090fca20edabd070d5660e701713fc74e1738df383{Oxk61Ex)L'|vy Pmkw팿/bVb<;?d7/{x_hN 8c˭3r!(W `dޑyZ|}E(;,}2017-04-07 14:20:57.643239u , Ac2498d7c37f6a02ebd22cf3559cd9f277ccb09f8ad5bf699e27a39ba043686e7G>|&Mzh Bf :%[B[Q!2 /A%'|xY>or_ Ȋ7fvZd&4%{GM(.{ThgpR2017-04-07 14:20:57.643239u , A39c52e4aada679042ed5339fc5508da65edc889a218e5762894e0d280cd47fe4cZjTHVRɦ VQB7.l,%͢ԲTp"$X>|)n &,w#@2 3xN4ƅaZZ'6&œObȌ .ί; _p_ȞƮVMib%=lʼn&5f2017-04-07 14:20:57.643239u , Ac002659504e8f12ea464a7a797de78cac2b18ea62cb3fc7398a9a5389f14bd846t7Xdt_r!fh/+1*[Sh]7&DbpZ~ʔ@8ۮ:x|I# 2YJ[9@=܁:!4 | \$e;D[2017-04-07 14:20:57.643239u , A8908a9ca78d9594ec2396f9846c1e415c78d1b25209a0c800135f7a1532317a0Õxԛ(k/#Y?(G7ᎇb2Ufgkeg]], UA,æH'%q'7;eډ::?=wJ}(hYRk4ITg'J:2017-04-07 14:20:57.643239 L Ab7ecaacfbb53287a75d6e60c7b07ad8b03b2bf8769c059b10e71ef2ff36b56c763~ˆ#ɷ^R*Ր..Ǧu^>ul: 0,#p/ m2-u?JhoKct*`m³+z&1ި(`7 Kz2017-04-07 14:20:57.643239 L Aaec4cb9e430485e9b2b777ca06f24f8eb967d6b1c2dbbde7357c034f203174b43(CA+0=@\k(zCe-ջG:u @фPfCu`(f|hoA*΢WD-S==քi*,iӽd%Oma`nᑐD 0xCeC]2017-04-07 14:20:57.643239 L Ac27aef68c18a0eff3f69dc8538f657d8633c3a4a81c2ac2774b0c667e48160571n3;Jm!"1=m/NGB<, }#(CAT!95s삉d?{N!keB/;d;uFXǛ]KTط 7Tʾ;⤗y}WUvBN5[a_O2017-04-07 14:20:57.643239 L A3135b2d726c22c150a19024bd664acc72c7dc983ae891ef5227d59bd1be84b40h6B=c!] 9aЌU#mD 0 Jl_'g\,'1г<wT+4z 9[T@GO?`Gt~/85KK"E͸[: ZǚsEY<$1}ʱ%0f)TQ2017-04-07 14:20:57.643239 l Ae492307ad876783a867c7504367ebabe2fe38ae37411a307038646c2b0acf56b]5! f{aO%td+G >n;c*G V$F;W@&ys=du>*/ouS%zi9xzE@J2017-04-07 14:20:57.643239 P   # * 18?FMT[RYP$ L Ad4480f0d551c87cdf7a59c3df3568c7b6bcd9222adb4efd439b597a2f1a4d402o#K6oUܣM9kDǏHc;\fyL˓49ڦ i= GjC՞OFEFfW4< WOJVUFsi}ԡLӲ MN 5Wm P pi2017-04-07 14:20:57.643239u# , Aa4d0f4e6d58bb4e21d5f338350e9c556e5b42b4045c69678660b8a0750ea79beDmN8a"Ra|!œU8ѬGsdN2[V5f 7r|fd}Ī~ .o' lMndñO txyk !" HŠ"zg{a2017-04-07 14:20:57.643239" L A100de45b359adcfec1898f25ecf636856c8fadb090dd6a5b012778ecf57289f78=k4T{!4tg7S\6>wDJ=?ٽ<{T9E:V +>'PtcS.H_q+vSVd>v qđ# $u D~xK.h3%5aȩG^u2017-04-07 14:20:57.643239u! 
, A0bafb77bc8c504c18b0ccc0dbb328aae8aa5f033cad82ead9c5e610220957c31ñM3zy)1)>w+R%Ӗ$7Yqq BWoD4ё vgPoPoMj}YukGbU5zZhy{H2017-04-07 14:20:57.643239u , A7c1234ea538417e501581f75439ab526170a2df69f63a506c2fe98477e939835+ NMqne=dߎm4HuL6T$n~L;ef,C㋑Hj0rxx iE5/A)ؖPt/ sN0G_L(Z5웂(Hw2' s#h̵B3LϹpe62017-04-07 14:20:57.643239u , Af736be7f707bddecebf45bc7f160ea3eaf0eb32e9d90ed4a431716135aaf88a1O4?3w￵3f=ɀڕS">ub6S`vRN(9'|dy_̌ PVXd|ǝ9RQN%!؟phAH&V2017-04-07 14:20:57.643239u , Abb7df69a4d21aa45ec8b382f9d09926e6fcb5b5a409745c3794e7af762fb7e3dӐ;7&3;p mÙ qw58A^:D 78b+{zUltTKLV}q6wXz?J1E$ 0݉@^p!$gV=.lo ]x'%2017-04-07 14:20:57.643239u , Ab80d90a5ccd8fdb5426639d82886a2222509ccf98bb311feea6412d74ec70871yp糊g^iL=3*"s_^Sሥ"39G?}IKtE*qy#h~N$BLG#ԹS9n0nw ţwUa `+ZTzvFltMB 2017-04-07 14:20:57.643239u , A32b84a0dc6a37429ed3cc84a2942408a0caa0e0fe28c8e7ae1947532c74cce7ahSKbkO h>!eq&pN^%5=^>mL$uˤـ>bq&RiReJnKYl$Ox;ȋ1_aa<*RtqqHA`2017-04-07 14:20:57.643239u , Afce4567aff60d8ced7277816f1768469355bfddf333b2ac6dac2879f74f21dab 烇.c51XA"+Ackt6`i:!S0 Fpu ~%N/`i2T~ (RwPSyg 7g yeߝO&hP@e&Wh8nP600 #Z2017-04-07 14:20:57.643239u , Aed6603755b54b1e47ee382d20cef239b9ac37f9d2a490368769e4cb72cbad5967CڔBCQǞA¥ *$Q,F@%!1A=k]vV7(` I]v+k[&JẦ8a< 9;ɻM1Ђ-S$y,{ts>]HΪezt2017-04-07 14:20:57.643239u , A3468d3d10271019d2d71f129a758ef86ae625584c2e6da4d7f32cda369eed45cnEgǗ_Hfvk|&”!ypT縁v},^y/H #BXCC7Tw w϶GGu-2B32ϕ@37eёg:FiɄcW2017-04-07 14:20:57.643239u , A79afab597c78b4fc2cd818a3774c7bad77387b7c43d7cbcf8734e6f6a352fdffS53DG:N {JN s+ fG,qdD“wpɃcatO5o*Gfi8:=Hϭ.Ιo;-[@] ^*R'C/U,> R2017-04-07 14:20:57.643239u , A6ad5d857467ee3977b86305a1a93e087f69dc6584bfff0b88b9424cbd169f2f9e RB2/+:͒ؕuP! FuegVeV2~O"FRrNR^MX;✅g W9~`1{;`k:t?U͸2017-04-07 14:20:57.643239u , Adf2bc66cd99a0dabaef5ee826addc62e5c3eed2e1e1879ea69400de185526942 ɉݠgVӌ?== b;EtL702XsJ e;9ɤc~fIF(.+%]P[38CHPRW+u.eZ ](b6Bi?34wm8!Jkh4ݞ2017-04-07 14:20:57.643239 y y3 L A494edccafb06478e72ebb6b962465aadde1fca49994bcdc41268e7ff7f9298d5=ܙ/BdاM)@WُP6̷&xל+XOܐنX_gH10/NۆOZeZí|,1vv$Y>O6<&oX59\N0hӏXnUKq2017-04-07 14:20:57.6432391 L A04ce55962e0acf178c8e6f94ca0a42751353bfc47c9228a119a8a05bc58ceabd#L>,*v6̰62̬K&`Y&9աE qH@y )?"OevzWo\\X{I2ӖuW:.Ҷ2Y2017-04-07 14:20:57.6432390 L Abd47199a7c7b5a9bdb702c12071920f2edf08ef9dfb1f5b0a6f0063bd3048bd41'_K+̱ς6pΥNi 8DO\n}h&v=6.>խ?j6Ӥ3^ˮPv5 x GR2017-04-07 14:20:57.643239/ L Ad17b52c7d63d953541430f9f085aee50f0afa256856e893e5feb97dbc08dd8f3/<)H 3 w)9k>nF $+atۧœp>e>~"FCZ?i6F*^jv4z0RTTBNi7x+P)N,9"dLjpsH2mc[sK2g|N2017-04-07 14:20:57.643239. 
L A382e30052f6524efa959db507de0c1ccd888e9a8c3f82e6ded5269c95d112465AMh쵓VDƺ>y0pMyLL*4b^35Y(~-[wqkEW?( T`hTNJRhtͩaDl#t@>)v;]/"n2017-04-07 14:20:57.643239+ L A2bfcf22cc4a2f7953eb39fcca08ca0f644b4fb9bdbc47adbae6351798ab01f68+Rb(+!€δ9iS7:gASc0n>ǭN_RD]Żs#Ά0NZ3Sk4IG1JB {K@JIXgvǴ[@껟߹йk(r.h p(DCVթ5i7c+2017-04-07 14:20:57.643239* L A60eb3bc158021ac1739223721d42ca3435fff23a1aa14b15abdb19c3f6c37d19ώGTmsDZY/PHJGXX>=Gfvk KD&.A񩎾}`4w`_aePoz Ry/Qң٢䷂̞6heT#6h>Q%X2017-04-07 14:20:57.643239) L A2dabb147d02dc8b51d973b093332a3152424113766f3900ea702f46acbd0ef31lٹO 5.8 (:Uζl't=l7U)bJ{t2017-04-07 14:20:57.643239& L Ae9cb562540d22233451c2395961fa72669bb634278d19454cdf84921ad6bb132Qޕ~tqp#-Xl܂ > ʽ ЌΖːhbp┕@Աor@8ֶ7m{Pu6 ;>I\nn+?ÓY-3*/8adZwUUi A)G7hڰ62017-04-07 14:20:57.643239 y yB L A16d4f4ec895bde138348e3bc733b1613c9372445dc51dca585aeb13f65900a70&w>:yi3d_V=ÿF3uB#d+*GǨoިG8ir[ف9ѬtN+xjV>PßPc5ќ(X׈[ta/ Ѣ2017-04-07 14:20:57.643239A L Ad259213842be6f8a319e2fe978f7c840bdf5fc059cd92b7c25b01c10dc8f8670 6‹%+NJ<"|0YM{y,56d_>g옼M Qc̱?3p~"= r}/4LV* {j Ͼ꾃?P# s%D/q14 ^ Ҧ 3G&2017-04-07 14:20:57.643239@ L Afef2235f8f2b5ea7ba4048dc40a2439b9034a82215735df34ee8ffe9d60f193d~5A"zVIu!j*!lL> -&2$4jߧ(-7ps 8v 3+pZ¡&)h@]ly&o]7bNu'cXpmnEs+g+zŒv&2017-04-07 14:20:57.643239? L A95c8db062793d8dd44553634638da4d0ceea4e0aeec686e0e26a3821b9cf2045_lbI-46|Pk14G\0>ҟ`@TzIj3".5wÑtQ f9M v$I` aEQD< ,4ı'#,/F2.AU,/ ACQYj2017-04-07 14:20:57.643239> L Aadf88dd9ece9481bf026f5c523bc54779977a4def8416a4b3432078b3b4f3554E},SU/M0 @Bʣ+/ )q"Ox(u3 3Ju(:\e:ꈟ}FVx@426f?CTO] ī(uwPqsQ4C94)7#D2017-04-07 14:20:57.643239= L Ac294e5510dbcf24c119939555b21aa610c302135d48e2e930acd893024840d45ԧhANzUW9zϷ',Y\C!HEY5 ;<q&BܥM+?ۇn7@@ZHq*%S@r2w^:6d<+-ǑLޒ~2017-04-07 14:20:57.643239< L A95ce19c921a571bc14ed150077217bfba818b9bd7edd7101ee9d972be2ef8be4Y+eRf`s  קne[`Xs8^){,ƒ AfR |h^~d'piW'6SW<JT+vOeKnk4d/1HJ *#|+? e2017-04-07 14:20:57.643239; L Ab189bca3703142eb6444c32774842c71068fd973bccb02fb6fbb6da038b2e2ab :wl4z7<˪7ǠFC"-&`̪;TqӐDӞ ,VgJ;#24Yfs1QpOjo7, O2{Iym^\Hio2;Sur;I 2017-04-07 14:20:57.643239: L A84aef8b0782f26965eb8ae38ca0233cb7b252fec99c1e71a40596043660962b0HC1ʁ9뒬qt@QC7Dj 2ƃ'v ^r^^A2D=-?imwamQ O ~L{7|.&gԠESyA Z Iv NTm4.XG "2\B>D (^D_S2017-04-07 14:20:57.6432397 L Ac831d922fa0a515362625dbb113f416ae9b6bdf51ef2a54165be62e450e57650~%5:PtdDX#Z(Tn<CO޼n Jw4 ͊l3=.^;}نWYuDʜVΎ+:&nXhk֙qӭyۘCfjK+o!hLP+OJAD2017-04-07 14:20:57.6432396 L A6c0987d0229fbc7897bda1b8d05a2740dbb45c8dd298ff5f33a438d27d1b0069WG;3 ~Wn7T;(-iXԪqݤ/S_~:[9}:ɮ U ^z<ӌ5˿}ʺnY҂Y/Ǚ-wpt< BW(6L${> TgY/2017-04-07 14:20:57.6432395 L A355e956238901f8286c90711c0e486983594ffae3c228d39357453b311bb4cc62q 0cb8{h__@:Vx$D9u?P]VHQBvrCWz W*)ii],oo-yـ8U>@)=74T=tޒ_+vl,u`Rl^!:ƒ-2_OFlS̍kG 7Y>㹘UYo2017-04-07 14:20:57.643239 t }tN L Afff264302adbd5aab38cf6a8f3cf3238fcb28874625da6f0dca01709213a0e0aUH)ʆp/=e_ "nZ}!lܸp=s ? KFBL60ź$ gӻeX뎊$2iU4 rw)w^zXL ܆&L0G9?e2017-04-07 14:20:57.643239M L A5739e280bf5fcffe4683f54a12b7d5ad369b82761f023c214a21a47e728ed6f2O֊z zV^B2ߠ[P Ʒ3cZ0k=OXV]VlE҃y#kJ, AI@5 _ | аtuJ<9*J9Fa=S7X)eך2nV.}2017-04-07 14:20:57.643239L l Aae33690101d2980e5c2023d14ba2bf4402129477138e1e69a07bbeb67c47e0ea!Q hM%Z. 
:j0-Ή[j6X?mc?-̰hhM6.idNEkL\6J=I-K]UdߎQb {԰?^jA/:"]Sα |!:/6W JL$l"No=-w_=B,s2017-04-07 14:20:57.643239K l Addc3bcea4a490cfd92e818d45a91d7975854f61db11e0f44a08f5bbbfd374a17΋:fDCqֺ qđ|nW*~mjh1qcۿx)KQm2e@;JF#pn_F^ }fb8fǸR*[VB.#琹{]}1\Au2*JxӘB6.XƨZZqoښ72017-04-07 14:20:57.643239J L A697d74c312cf412cf9c6ffa0c8d3993daaabd42f1a806c48867ebef94160b14fw]d~&c"+Wf[^:U:OoC%z%hžpfi{2.OcaIrA85%c͖l!O/٪WT)&E28 ƸIbw*_7zzq?]OV2017-04-07 14:20:57.643239I L Af424121c3d778825164c5a54441a4b03d0280803f1305c2083493bf2a8a11d23Ƅ:Xo.ɟJXu}׉2Kgݝ7Ml)awId:2; ®ksjgi! @1׾T&MHHÍ10 wjZDe;[My2017-04-07 14:20:57.643239H L Ac00feb1a3c86b895e1cdbe787e206a72fe00a28f812941feca3b6a4250cac985[cz~<11;0 5.30X<]'+`9ۂ%U'#Kl>:_nF޺75O`c|Fw A*R-:]HGk.NO64".#NT ڽRnT2017-04-07 14:20:57.643239G L A34a71c72e78457b9a0558986c652d80e758061c2624f2c4845a5da1d32ca7dc9/#97gn AI=fE q~@z+V,BnׯbeӾcQ ՉdiiՎf'o?cFp K`3LNCB/a3s~yJwhڛi\K r3W_|52017-04-07 14:20:57.643239F L A51c97200c3090df50dc50be563afee75ed8d4c38acccc9490a747b0e744be565iX*|R!il~sn&eB&kqї10ڑkzֲ( rT^0东ł)z^4<7,KHI&,Q֝$qOz *5T0- .V!6eE]3LWD$ؔq+_ I2}ZU#2017-04-07 14:20:57.643239E L Aa43bcf8e286d9b75592ce51c33b6cf5537b1b8c2ea7b45040bec81546ee08a86FEz_Z(/n Cn1`Kupz鞅O.Jz'VцQsm!/=Pū|4aF*Z!#5.SOُFPYa]4+ 7丿0@rO)WB)d|v 2017-04-07 14:20:57.643239D L Aa6c9661b31614c60a2a58f57656fd7c6a09d60f519424e59a2d5cf4ed5cf64f4)?D)X+H"zoKGdHym]cBE]Y{Z|@e_Ȋl`07ΞKmJ6fF$ܗ CQ')eb\)o`!c# dT9p`>nbPϷ9;2017-04-07 14:20:57.643239C L A7795e8f8dbcff5c24c5785bd04ad970d5c94afbfd63d0d1132ba20a0f8c3cb15Vȧ}7 )#2h!+ɺ}qbjd ֽ̰ ڄAC23VU%9T &Sm5.`pD4?`;ZXՌ'I,.n> U!&7?*䎋.7Hmb Rr2017-04-07 14:20:57.643239vdirsyncer-0.18.0/tests/storage/etesync/etesync_server/etesync_server/000077500000000000000000000000001406140636100263515ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/storage/etesync/etesync_server/etesync_server/__init__.py000066400000000000000000000000001406140636100304500ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/storage/etesync/etesync_server/etesync_server/settings.py000066400000000000000000000063121406140636100305650ustar00rootroot00000000000000""" Django settings for etesync_server project. Generated by 'django-admin startproject' using Django 1.10.6. For more information on this file, see https://docs.djangoproject.com/en/1.10/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.10/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = "d7r(p-9=$3a@bbt%*+$p@4)cej13nzd0gmnt8+m0bitb=-umj#" # SECURITY WARNING: don't run with debug turned on in production! 
DEBUG = True

ALLOWED_HOSTS = []


# Application definition

INSTALLED_APPS = [
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    "rest_framework",
    "rest_framework.authtoken",
    "journal.apps.JournalConfig",
]

MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.common.CommonMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    "django.middleware.clickjacking.XFrameOptionsMiddleware",
]

ROOT_URLCONF = "etesync_server.urls"

TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
            ],
        },
    },
]

WSGI_APPLICATION = "etesync_server.wsgi.application"


# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3",
        "NAME": os.environ.get("ETESYNC_DB_PATH", os.path.join(BASE_DIR, "db.sqlite3")),
    }
}


# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",  # noqa
    },
    {
        "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",  # noqa
    },
    {
        "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",  # noqa
    },
    {
        "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",  # noqa
    },
]


# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/

LANGUAGE_CODE = "en-us"

TIME_ZONE = "UTC"

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/

STATIC_URL = "/static/"
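
The DATABASES block above reads the ETESYNC_DB_PATH environment variable before falling back to a db.sqlite3 next to the project. That is what lets the test suite run each session against a disposable copy of the seeded database (see the etesync_app fixture in test_main.py further down). A minimal sketch of that pattern — the helper name and paths are illustrative, not part of this repository:

# Hypothetical helper, mirroring what the etesync_app fixture does below:
# copy the seeded database somewhere disposable and point Django at it
# *before* the settings module is imported.
import os
import shutil
import tempfile


def use_scratch_database(seed_path):
    scratch = os.path.join(tempfile.mkdtemp(), "etesync.sqlite")
    shutil.copy(seed_path, scratch)  # never mutate the checked-in seed db
    os.environ["ETESYNC_DB_PATH"] = scratch  # read by DATABASES above
    return scratch
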
vdirsyncer-0.18.0/tests/storage/etesync/etesync_server/etesync_server/urls.py000066400000000000000000000027331406140636100277150ustar00rootroot00000000000000"""etesync_server URL Configuration

The `urlpatterns` list routes URLs to views. For more information please see:
    https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
    1. Add an import:  from my_app import views
    2. Add a URL to urlpatterns:  url(r'^$', views.home, name='home')
Class-based views
    1. Add an import:  from other_app.views import Home
    2. Add a URL to urlpatterns:  url(r'^$', Home.as_view(), name='home')
Including another URLconf
    1. Import the include() function: from django.conf.urls import url, include
    2. Add a URL to urlpatterns:  url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include
from django.conf.urls import url
from journal import views
from rest_framework_nested import routers

router = routers.DefaultRouter()
router.register(r"journals", views.JournalViewSet)
router.register(r"journal/(?P<journal>[^/]+)", views.EntryViewSet)
router.register(r"user", views.UserInfoViewSet)

journals_router = routers.NestedSimpleRouter(router, r"journals", lookup="journal")
journals_router.register(r"members", views.MembersViewSet, base_name="journal-members")
journals_router.register(r"entries", views.EntryViewSet, base_name="journal-entries")

urlpatterns = [
    url(r"^api/v1/", include(router.urls)),
    url(r"^api/v1/", include(journals_router.urls)),
]

# Adding this just for testing, this shouldn't be here normally
urlpatterns += (url(r"^reset/$", views.reset, name="reset_debug"),)
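
The `/reset/` route registered above exists purely so the test suite can wipe server state between runs; test_main.py below POSTs to it with the stored auth token. A standalone sketch of that call (the URL and token handling are the ones the tests use; the wrapper function itself is illustrative):

# Hypothetical standalone version of the reset call made in test_main.py.
import requests


def reset_server(token, base_url="http://127.0.0.1:8000"):
    headers = {"Authorization": "Token " + token}
    r = requests.post(base_url + "/reset/", headers=headers, allow_redirects=False)
    r.raise_for_status()  # the tests simply assert a 200 here
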
vdirsyncer-0.18.0/tests/storage/etesync/etesync_server/etesync_server/wsgi.py000066400000000000000000000006251406140636100276770ustar00rootroot00000000000000"""
WSGI config for etesync_server project.

It exposes the WSGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os

from django.core.wsgi import get_wsgi_application

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "etesync_server.settings")

application = get_wsgi_application()
vdirsyncer-0.18.0/tests/storage/etesync/etesync_server/manage.py000077500000000000000000000014641406140636100251230ustar00rootroot00000000000000#!/usr/bin/env python
import os
import sys

if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "etesync_server.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django  # noqa
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    execute_from_command_line(sys.argv)
vdirsyncer-0.18.0/tests/storage/etesync/test@localhost/000077500000000000000000000000001406140636100232415ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/storage/etesync/test@localhost/auth_token000066400000000000000000000000511406140636100253210ustar00rootroot0000000000000063ae6eec45b592d5c511f79b7b0c312d2c5f7d6a
vdirsyncer-0.18.0/tests/storage/etesync/test@localhost/key000066400000000000000000000002761406140636100237600ustar00rootroot00000000000000[... binary data omitted: encryption key for the test@localhost EteSync account ...]
vdirsyncer-0.18.0/tests/storage/etesync/test_main.py000066400000000000000000000047101406140636100226100ustar00rootroot00000000000000import os
import shutil
import sys

import pytest
import requests

from .. import StorageTests
from vdirsyncer.storage.etesync import EtesyncCalendars
from vdirsyncer.storage.etesync import EtesyncContacts

pytestmark = pytest.mark.skipif(
    os.getenv("ETESYNC_TESTS", "") != "true", reason="etesync tests disabled"
)


@pytest.fixture(scope="session")
def etesync_app(tmpdir_factory):
    sys.path.insert(0, os.path.join(os.path.dirname(__file__), "etesync_server"))
    db = tmpdir_factory.mktemp("etesync").join("etesync.sqlite")
    shutil.copy(
        os.path.join(os.path.dirname(__file__), "etesync_server", "db.sqlite3"), str(db)
    )
    os.environ["ETESYNC_DB_PATH"] = str(db)
    from etesync_server.wsgi import application

    return application


class EtesyncTests(StorageTests):
    supports_metadata = False

    @pytest.fixture
    def get_storage_args(self, request, get_item, tmpdir, etesync_app):
        import wsgi_intercept
        import wsgi_intercept.requests_intercept

        wsgi_intercept.requests_intercept.install()
        wsgi_intercept.add_wsgi_intercept("127.0.0.1", 8000, lambda: etesync_app)

        def teardown():
            wsgi_intercept.remove_wsgi_intercept("127.0.0.1", 8000)
            wsgi_intercept.requests_intercept.uninstall()

        request.addfinalizer(teardown)

        with open(
            os.path.join(os.path.dirname(__file__), "test@localhost/auth_token")
        ) as f:
            token = f.read().strip()

        headers = {"Authorization": "Token " + token}
        r = requests.post(
            "http://127.0.0.1:8000/reset/", headers=headers, allow_redirects=False
        )
        assert r.status_code == 200

        def inner(collection="test"):
            rv = {
                "email": "test@localhost",
                "db_path": str(tmpdir.join("etesync.db")),
                "secrets_dir": os.path.dirname(__file__),
                "server_url": "http://127.0.0.1:8000/",
            }
            if collection is not None:
                rv = self.storage_class.create_collection(collection=collection, **rv)
            return rv

        return inner


class TestContacts(EtesyncTests):
    storage_class = EtesyncContacts

    @pytest.fixture(params=["VCARD"])
    def item_type(self, request):
        return request.param


class TestCalendars(EtesyncTests):
    storage_class = EtesyncCalendars

    @pytest.fixture(params=["VEVENT"])
    def item_type(self, request):
        return request.param
vdirsyncer-0.18.0/tests/storage/servers/000077500000000000000000000000001406140636100202705ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/storage/servers/__init__.py000066400000000000000000000000001406140636100223700ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/storage/servers/baikal/000077500000000000000000000000001406140636100215135ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/storage/servers/baikal/__init__.py000066400000000000000000000013201406140636100236200ustar00rootroot00000000000000import pytest


class ServerMixin:
    @pytest.fixture
    def get_storage_args(self, request, tmpdir, slow_create_collection, baikal_server):
        def inner(collection="test"):
            base_url = "http://127.0.0.1:8002/"
            args = {
                "url": base_url,
                "username": "baikal",
                "password": "baikal",
            }

            if self.storage_class.fileext == ".vcf":
                args["url"] = base_url + "card.php/"
            else:
                args["url"] = base_url + "cal.php/"

            if collection is not None:
                args = slow_create_collection(self.storage_class, args, collection)
            return args

        return inner
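
Each of these server modules exposes the same contract: a ServerMixin whose get_storage_args fixture returns a callable mapping a collection name to constructor arguments for the storage under test. Presumably the shared StorageTests base class (imported in the test files above and below) combines the two roughly like this — a sketch under that assumption, not the suite's actual code; only get_item and get_storage_args are real fixture names from this repository:

# Illustrative sketch of how a ServerMixin plugs into StorageTests.
import pytest


class StorageTestsSketch:
    @pytest.fixture
    def get_storage(self, get_storage_args):
        def inner(collection="test"):
            # Build the storage under test from the mixin-provided args.
            return self.storage_class(**get_storage_args(collection=collection))

        return inner

    def test_roundtrip(self, get_storage, get_item):
        s = get_storage()
        href, etag = s.upload(get_item())
        assert dict(s.list())[href] == etag
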
"https://brutus.lostpackets.de/davical-test/caldav.php/", } except KeyError as e: pytestmark = pytest.mark.skip("Missing envkey: {}".format(str(e))) @pytest.mark.flaky(reruns=5) class ServerMixin: @pytest.fixture def davical_args(self): if self.storage_class.fileext == ".ics": return dict(caldav_args) elif self.storage_class.fileext == ".vcf": pytest.skip("No carddav") else: raise RuntimeError() @pytest.fixture def get_storage_args(self, davical_args, request): def inner(collection="test"): if collection is None: return davical_args assert collection.startswith("test") for _ in range(4): args = self.storage_class.create_collection( collection + str(uuid.uuid4()), **davical_args ) s = self.storage_class(**args) if not list(s.list()): request.addfinalizer(lambda: s.session.request("DELETE", "")) return args raise RuntimeError("Failed to find free collection.") return inner vdirsyncer-0.18.0/tests/storage/servers/davical/install.sh000066400000000000000000000000411406140636100236700ustar00rootroot00000000000000pip install pytest-rerunfailures vdirsyncer-0.18.0/tests/storage/servers/fastmail/000077500000000000000000000000001406140636100220705ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/storage/servers/fastmail/__init__.py000066400000000000000000000017651406140636100242120ustar00rootroot00000000000000import os import pytest class ServerMixin: @pytest.fixture def get_storage_args(self, item_type, slow_create_collection): if item_type == "VTODO": # Fastmail has non-standard support for TODOs # See https://github.com/pimutils/vdirsyncer/issues/824 pytest.skip("Fastmail has non-standard VTODO support.") def inner(collection="test"): args = { "username": os.environ["FASTMAIL_USERNAME"], "password": os.environ["FASTMAIL_PASSWORD"], } if self.storage_class.fileext == ".ics": args["url"] = "https://caldav.fastmail.com/" elif self.storage_class.fileext == ".vcf": args["url"] = "https://carddav.fastmail.com/" else: raise RuntimeError() if collection is not None: args = slow_create_collection(self.storage_class, args, collection) return args return inner vdirsyncer-0.18.0/tests/storage/servers/icloud/000077500000000000000000000000001406140636100215475ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/storage/servers/icloud/__init__.py000066400000000000000000000020421406140636100236560ustar00rootroot00000000000000import os import pytest class ServerMixin: @pytest.fixture def get_storage_args(self, item_type, slow_create_collection): if item_type != "VEVENT": # iCloud collections can either be calendars or task lists. 
            # See https://github.com/pimutils/vdirsyncer/pull/593#issuecomment-285941615  # noqa
            pytest.skip("iCloud doesn't support anything else than VEVENT")

        def inner(collection="test"):
            args = {
                "username": os.environ["ICLOUD_USERNAME"],
                "password": os.environ["ICLOUD_PASSWORD"],
            }

            if self.storage_class.fileext == ".ics":
                args["url"] = "https://caldav.icloud.com/"
            elif self.storage_class.fileext == ".vcf":
                args["url"] = "https://contacts.icloud.com/"
            else:
                raise RuntimeError()

            if collection is not None:
                args = slow_create_collection(self.storage_class, args, collection)
            return args

        return inner
vdirsyncer-0.18.0/tests/storage/servers/radicale/000077500000000000000000000000001406140636100220345ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/storage/servers/radicale/__init__.py000066400000000000000000000011121406140636100241400ustar00rootroot00000000000000import pytest


class ServerMixin:
    @pytest.fixture
    def get_storage_args(
        self,
        request,
        tmpdir,
        slow_create_collection,
        radicale_server,
    ):
        def inner(collection="test"):
            url = "http://127.0.0.1:8001/"
            args = {
                "url": url,
                "username": "radicale",
                "password": "radicale",
            }

            if collection is not None:
                args = slow_create_collection(self.storage_class, args, collection)
            return args

        return inner
vdirsyncer-0.18.0/tests/storage/servers/skip/000077500000000000000000000000001406140636100212365ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/storage/servers/skip/__init__.py000066400000000000000000000002021406140636100233410ustar00rootroot00000000000000import pytest


class ServerMixin:
    @pytest.fixture
    def get_storage_args(self):
        pytest.skip("DAV tests disabled.")
vdirsyncer-0.18.0/tests/storage/servers/xandikos/000077500000000000000000000000001406140636100221105ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/storage/servers/xandikos/__init__.py000066400000000000000000000007331406140636100242240ustar00rootroot00000000000000import pytest


class ServerMixin:
    @pytest.fixture
    def get_storage_args(
        self,
        request,
        tmpdir,
        slow_create_collection,
        xandikos_server,
    ):
        def inner(collection="test"):
            url = "http://127.0.0.1:8000/"
            args = {"url": url}

            if collection is not None:
                args = slow_create_collection(self.storage_class, args, collection)
            return args

        return inner
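
The radicale and xandikos mixins above delegate collection setup to a slow_create_collection fixture; the davical module, which cannot use it, shows the underlying steps inline. A rough sketch of what such a helper boils down to — the function body is an assumption modeled on the davical code above, not the fixture's actual source:

# Hypothetical reimplementation of the collection-setup step: create a
# uniquely named collection on the server and return storage arguments
# pointing at it.
import uuid


def create_test_collection(storage_class, args, collection):
    assert collection.startswith("test")  # never touch real user collections
    return storage_class.create_collection(collection + str(uuid.uuid4()), **args)
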
vdirsyncer-0.18.0/tests/storage/test_filesystem.py000066400000000000000000000075601406140636100224030ustar00rootroot00000000000000import subprocess

import pytest

from . import StorageTests
from vdirsyncer.storage.filesystem import FilesystemStorage
from vdirsyncer.vobject import Item


class TestFilesystemStorage(StorageTests):
    storage_class = FilesystemStorage

    @pytest.fixture
    def get_storage_args(self, tmpdir):
        def inner(collection="test"):
            rv = {"path": str(tmpdir), "fileext": ".txt", "collection": collection}
            if collection is not None:
                rv = self.storage_class.create_collection(**rv)
            return rv

        return inner

    def test_is_not_directory(self, tmpdir):
        with pytest.raises(OSError):
            f = tmpdir.join("hue")
            f.write("stub")
            self.storage_class(str(tmpdir) + "/hue", ".txt")

    def test_broken_data(self, tmpdir):
        s = self.storage_class(str(tmpdir), ".txt")

        class BrokenItem:
            raw = "Ц, Ш, Л, ж, Д, З, Ю".encode()
            uid = "jeezus"
            ident = uid

        with pytest.raises(TypeError):
            s.upload(BrokenItem)
        assert not tmpdir.listdir()

    def test_ident_with_slash(self, tmpdir):
        s = self.storage_class(str(tmpdir), ".txt")
        s.upload(Item("UID:a/b/c"))
        (item_file,) = tmpdir.listdir()
        assert "/" not in item_file.basename and item_file.isfile()

    def test_ignore_tmp_files(self, tmpdir):
        """Test that files with .tmp suffix beside .ics files are ignored."""
        s = self.storage_class(str(tmpdir), ".ics")
        s.upload(Item("UID:xyzxyz"))
        (item_file,) = tmpdir.listdir()
        item_file.copy(item_file.new(ext="tmp"))
        assert len(tmpdir.listdir()) == 2
        assert len(list(s.list())) == 1

    def test_ignore_tmp_files_empty_fileext(self, tmpdir):
        """Test that files with .tmp suffix are ignored with empty fileext."""
        s = self.storage_class(str(tmpdir), "")
        s.upload(Item("UID:xyzxyz"))
        (item_file,) = tmpdir.listdir()
        item_file.copy(item_file.new(ext="tmp"))
        assert len(tmpdir.listdir()) == 2
        # assert False, tmpdir.listdir()  # enable to see the created filename
        assert len(list(s.list())) == 1

    def test_ignore_files_typical_backup(self, tmpdir):
        """Test file-name ignorance with typical backup ending ~."""
        ignorext = "~"  # without dot

        storage = self.storage_class(str(tmpdir), "", fileignoreext=ignorext)
        storage.upload(Item("UID:xyzxyz"))
        (item_file,) = tmpdir.listdir()
        item_file.copy(item_file.new(basename=item_file.basename + ignorext))

        assert len(tmpdir.listdir()) == 2
        assert len(list(storage.list())) == 1

    def test_too_long_uid(self, tmpdir):
        storage = self.storage_class(str(tmpdir), ".txt")
        item = Item("UID:" + "hue" * 600)
        href, etag = storage.upload(item)
        assert item.uid not in href

    def test_post_hook_inactive(self, tmpdir, monkeypatch):
        def check_call_mock(*args, **kwargs):
            raise AssertionError()

        monkeypatch.setattr(subprocess, "call", check_call_mock)

        s = self.storage_class(str(tmpdir), ".txt", post_hook=None)
        s.upload(Item("UID:a/b/c"))

    def test_post_hook_active(self, tmpdir, monkeypatch):
        calls = []
        exe = "foo"

        def check_call_mock(call, *args, **kwargs):
            calls.append(True)
            assert len(call) == 2
            assert call[0] == exe

        monkeypatch.setattr(subprocess, "call", check_call_mock)

        s = self.storage_class(str(tmpdir), ".txt", post_hook=exe)
        s.upload(Item("UID:a/b/c"))
        assert calls

    def test_ignore_git_dirs(self, tmpdir):
        tmpdir.mkdir(".git").mkdir("foo")
        tmpdir.mkdir("a")
        tmpdir.mkdir("b")
        assert {c["collection"] for c in self.storage_class.discover(str(tmpdir))} == {
            "a",
            "b",
        }
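
The post_hook tests above pin down the hook's calling convention: the storage invokes the configured executable via subprocess.call with exactly one extra argument, which by the filesystem storage's behaviour is the path of the item file it just wrote. A sketch of a hook script consistent with that contract — the script itself is illustrative, not part of this repository:

#!/usr/bin/env python
# Hypothetical post_hook target consistent with the contract tested above:
# it is invoked as `hook <path-of-written-file>`.
import sys


def main():
    (path,) = sys.argv[1:]  # exactly one argument, per test_post_hook_active
    print("vdirsyncer wrote", path)


if __name__ == "__main__":
    main()
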
vdirsyncer-0.18.0/tests/storage/test_http.py000066400000000000000000000071431406140636100211740ustar00rootroot00000000000000import pytest
from requests import Response

from tests import normalize_item
from vdirsyncer.exceptions import UserError
from vdirsyncer.storage.http import HttpStorage
from vdirsyncer.storage.http import prepare_auth


def test_list(monkeypatch):
    collection_url = "http://127.0.0.1/calendar/collection.ics"

    items = [
        (
            "BEGIN:VEVENT\n"
            "SUMMARY:Eine Kurzinfo\n"
            "DESCRIPTION:Beschreibung des Termines\n"
            "END:VEVENT"
        ),
        (
            "BEGIN:VEVENT\n"
            "SUMMARY:Eine zweite Küèrzinfo\n"
            "DESCRIPTION:Beschreibung des anderen Termines\n"
            "BEGIN:VALARM\n"
            "ACTION:AUDIO\n"
            "TRIGGER:19980403T120000\n"
            "ATTACH;FMTTYPE=audio/basic:http://host.com/pub/ssbanner.aud\n"
            "REPEAT:4\n"
            "DURATION:PT1H\n"
            "END:VALARM\n"
            "END:VEVENT"
        ),
    ]

    responses = ["\n".join(["BEGIN:VCALENDAR"] + items + ["END:VCALENDAR"])] * 2

    def get(self, method, url, *a, **kw):
        assert method == "GET"
        assert url == collection_url
        r = Response()
        r.status_code = 200
        assert responses
        r._content = responses.pop().encode("utf-8")
        r.headers["Content-Type"] = "text/calendar"
        r.encoding = "ISO-8859-1"
        return r

    monkeypatch.setattr("requests.sessions.Session.request", get)

    s = HttpStorage(url=collection_url)

    found_items = {}

    for href, etag in s.list():
        item, etag2 = s.get(href)
        assert item.uid is not None
        assert etag2 == etag
        found_items[normalize_item(item)] = href

    expected = {
        normalize_item("BEGIN:VCALENDAR\n" + x + "\nEND:VCALENDAR") for x in items
    }

    assert set(found_items) == expected

    for href, etag in s.list():
        item, etag2 = s.get(href)
        assert item.uid is not None
        assert etag2 == etag
        assert found_items[normalize_item(item)] == href


def test_readonly_param():
    url = "http://example.com/"
    with pytest.raises(ValueError):
        HttpStorage(url=url, read_only=False)

    a = HttpStorage(url=url, read_only=True).read_only
    b = HttpStorage(url=url, read_only=None).read_only
    assert a is b is True


def test_prepare_auth():
    assert prepare_auth(None, "", "") is None

    assert prepare_auth(None, "user", "pwd") == ("user", "pwd")
    assert prepare_auth("basic", "user", "pwd") == ("user", "pwd")

    with pytest.raises(ValueError) as excinfo:
        assert prepare_auth("basic", "", "pwd")
    assert "you need to specify username and password" in str(excinfo.value).lower()

    from requests.auth import HTTPDigestAuth

    assert isinstance(prepare_auth("digest", "user", "pwd"), HTTPDigestAuth)

    with pytest.raises(ValueError) as excinfo:
        prepare_auth("ladida", "user", "pwd")
    assert "unknown authentication method" in str(excinfo.value).lower()


def test_prepare_auth_guess(monkeypatch):
    import requests_toolbelt.auth.guess

    assert isinstance(
        prepare_auth("guess", "user", "pwd"), requests_toolbelt.auth.guess.GuessAuth
    )

    monkeypatch.delattr(requests_toolbelt.auth.guess, "GuessAuth")

    with pytest.raises(UserError) as excinfo:
        prepare_auth("guess", "user", "pwd")
    assert "requests_toolbelt is too old" in str(excinfo.value).lower()


def test_verify_false_disallowed():
    with pytest.raises(ValueError) as excinfo:
        HttpStorage(url="http://example.com", verify=False)
    assert "forbidden" in str(excinfo.value).lower()
    assert "consider setting verify_fingerprint" in str(excinfo.value).lower()
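
As the tests above show, prepare_auth maps a configured auth method to a requests auth object: None or "basic" with credentials yield a plain (user, pwd) tuple, "digest" an HTTPDigestAuth, "guess" requests_toolbelt's GuessAuth, and anything else raises. A quick interactive demo mirroring those assertions (nothing here beyond behaviour already pinned down by the tests):

# Behaviour of prepare_auth, shown as a standalone snippet.
from requests.auth import HTTPDigestAuth

from vdirsyncer.storage.http import prepare_auth

assert prepare_auth(None, "", "") is None  # no credentials, no auth
assert prepare_auth("basic", "user", "pwd") == ("user", "pwd")
assert isinstance(prepare_auth("digest", "user", "pwd"), HTTPDigestAuth)
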
It supports writes via SingleFileStorage.""" _repr_attributes = ("url", "path") storage_name = "http_and_singlefile" def __init__(self, url, path, **kwargs): if kwargs.get("collection", None) is not None: raise ValueError() super().__init__(**kwargs) self.url = url self.path = path self._reader = vdirsyncer.storage.http.HttpStorage(url=url) self._reader._ignore_uids = False self._writer = SingleFileStorage(path=path) def list(self, *a, **kw): return self._reader.list(*a, **kw) def get(self, *a, **kw): self.list() return self._reader.get(*a, **kw) def upload(self, *a, **kw): return self._writer.upload(*a, **kw) def update(self, *a, **kw): return self._writer.update(*a, **kw) def delete(self, *a, **kw): return self._writer.delete(*a, **kw) class TestHttpStorage(StorageTests): storage_class = CombinedStorage supports_collections = False supports_metadata = False @pytest.fixture(autouse=True) def setup_tmpdir(self, tmpdir, monkeypatch): self.tmpfile = str(tmpdir.ensure("collection.txt")) def _request(method, url, *args, **kwargs): assert method == "GET" assert url == "http://localhost:123/collection.txt" assert "vdirsyncer" in kwargs["headers"]["User-Agent"] r = Response() r.status_code = 200 try: with open(self.tmpfile, "rb") as f: r._content = f.read() except OSError: r._content = b"" r.headers["Content-Type"] = "text/calendar" r.encoding = "utf-8" return r monkeypatch.setattr(vdirsyncer.storage.http, "request", _request) @pytest.fixture def get_storage_args(self): def inner(collection=None): assert collection is None return {"url": "http://localhost:123/collection.txt", "path": self.tmpfile} return inner vdirsyncer-0.18.0/tests/storage/test_memory.py000066400000000000000000000004371406140636100215240ustar00rootroot00000000000000import pytest from . import StorageTests from vdirsyncer.storage.memory import MemoryStorage class TestMemoryStorage(StorageTests): storage_class = MemoryStorage supports_collections = False @pytest.fixture def get_storage_args(self): return lambda **kw: kw vdirsyncer-0.18.0/tests/storage/test_singlefile.py000066400000000000000000000010361406140636100223310ustar00rootroot00000000000000import pytest from . 
vdirsyncer-0.18.0/tests/storage/test_singlefile.py000066400000000000000000000010361406140636100223310ustar00rootroot00000000000000import pytest

from . import StorageTests
from vdirsyncer.storage.singlefile import SingleFileStorage


class TestSingleFileStorage(StorageTests):
    storage_class = SingleFileStorage
    supports_metadata = False

    @pytest.fixture
    def get_storage_args(self, tmpdir):
        def inner(collection="test"):
            rv = {"path": str(tmpdir.join("%s.txt")), "collection": collection}
            if collection is not None:
                rv = self.storage_class.create_collection(**rv)
            return rv

        return inner
vdirsyncer-0.18.0/tests/system/000077500000000000000000000000001406140636100164575ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/system/cli/000077500000000000000000000000001406140636100172265ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/system/cli/__init__.py000066400000000000000000000000001406140636100213250ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/system/cli/conftest.py000066400000000000000000000014351406140636100214300ustar00rootroot00000000000000from textwrap import dedent

import pytest
from click.testing import CliRunner

import vdirsyncer.cli as cli


class _CustomRunner:
    def __init__(self, tmpdir):
        self.tmpdir = tmpdir
        self.cfg = tmpdir.join("config")
        self.runner = CliRunner()

    def invoke(self, args, env=None, **kwargs):
        env = env or {}
        env.setdefault("VDIRSYNCER_CONFIG", str(self.cfg))
        return self.runner.invoke(cli.app, args, env=env, **kwargs)

    def write_with_general(self, data):
        self.cfg.write(
            dedent(
                """
        [general]
        status_path = "{}/status/"
        """
            ).format(str(self.tmpdir))
        )
        self.cfg.write(data, mode="a")


@pytest.fixture
def runner(tmpdir):
    return _CustomRunner(tmpdir)
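
The runner fixture above is the entry point for all CLI tests in this directory: write_with_general prepends a valid [general] section, and invoke runs vdirsyncer with VDIRSYNCER_CONFIG pointing at the generated file. A minimal test in that style — the pair and storage paths are illustrative, following the pattern of the discover tests below:

# Hypothetical minimal CLI test built on the runner fixture defined above.
from textwrap import dedent


def test_sync_empty_pair(tmpdir, runner):
    runner.write_with_general(
        dedent(
            """
        [pair foobar]
        a = "foo"
        b = "bar"
        collections = null

        [storage foo]
        type = "filesystem"
        path = "{0}/foo/"
        fileext = ".txt"

        [storage bar]
        type = "filesystem"
        path = "{0}/bar/"
        fileext = ".txt"
        """
        ).format(str(tmpdir))
    )
    tmpdir.mkdir("foo")  # pre-create so discover doesn't prompt
    tmpdir.mkdir("bar")

    result = runner.invoke(["discover"])
    assert not result.exception

    result = runner.invoke(["sync"])
    assert not result.exception
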
"filesystem" path = "{base}/path_a/" fileext = ".txt" [storage my_b] type = "filesystem" path = "{base}/path_b/" fileext = ".txt" """ ) assert "Invalid general section." in str(excinfo.value) def test_wrong_general_section(read_config): with pytest.raises(exceptions.UserError) as excinfo: read_config( """ [general] wrong = true """ ) assert "Invalid general section." in str(excinfo.value) assert excinfo.value.problems == [ "general section doesn't take the parameters: wrong", "general section is missing the parameters: status_path", ] def test_invalid_storage_name(read_config): with pytest.raises(exceptions.UserError) as excinfo: read_config( """ [general] status_path = "{base}/status/" [storage foo.bar] """ ) assert "invalid characters" in str(excinfo.value).lower() def test_invalid_collections_arg(read_config): with pytest.raises(exceptions.UserError) as excinfo: read_config( """ [general] status_path = "/tmp/status/" [pair foobar] a = "foo" b = "bar" collections = [null] [storage foo] type = "filesystem" path = "/tmp/foo/" fileext = ".txt" [storage bar] type = "filesystem" path = "/tmp/bar/" fileext = ".txt" """ ) assert "Expected string" in str(excinfo.value) def test_duplicate_sections(read_config): with pytest.raises(exceptions.UserError) as excinfo: read_config( """ [general] status_path = "/tmp/status/" [pair foobar] a = "foobar" b = "bar" collections = null [storage foobar] type = "filesystem" path = "/tmp/foo/" fileext = ".txt" [storage bar] type = "filesystem" path = "/tmp/bar/" fileext = ".txt" """ ) assert 'Name "foobar" already used' in str(excinfo.value) def test_validate_collections_param(): x = cli.config._validate_collections_param x(None) x(["c", "a", "b"]) pytest.raises(ValueError, x, [None]) pytest.raises(ValueError, x, ["a", "a", "a"]) pytest.raises(ValueError, x, [[None, "a", "b"]]) x([["c", None, "b"]]) x([["c", "a", None]]) x([["c", None, None]]) vdirsyncer-0.18.0/tests/system/cli/test_discover.py000066400000000000000000000124501406140636100224570ustar00rootroot00000000000000import json from textwrap import dedent import pytest from vdirsyncer import exceptions from vdirsyncer.storage.base import Storage def test_discover_command(tmpdir, runner): runner.write_with_general( dedent( """ [storage foo] type = "filesystem" path = "{0}/foo/" fileext = ".txt" [storage bar] type = "filesystem" path = "{0}/bar/" fileext = ".txt" [pair foobar] a = "foo" b = "bar" collections = ["from a"] """ ).format(str(tmpdir)) ) foo = tmpdir.mkdir("foo") bar = tmpdir.mkdir("bar") for x in "abc": foo.mkdir(x) bar.mkdir(x) bar.mkdir("d") result = runner.invoke(["discover"]) assert not result.exception foo.mkdir("d") result = runner.invoke(["sync"]) assert not result.exception lines = result.output.splitlines() assert "Syncing foobar/a" in lines assert "Syncing foobar/b" in lines assert "Syncing foobar/c" in lines assert "Syncing foobar/d" not in result.output result = runner.invoke(["discover"]) assert not result.exception result = runner.invoke(["sync"]) assert not result.exception assert "Syncing foobar/a" in lines assert "Syncing foobar/b" in lines assert "Syncing foobar/c" in lines assert "Syncing foobar/d" in result.output # Check for redundant data that is already in the config. This avoids # copying passwords from the config too. 
assert "fileext" not in tmpdir.join("status").join("foobar.collections").read() def test_discover_different_collection_names(tmpdir, runner): foo = tmpdir.mkdir("foo") bar = tmpdir.mkdir("bar") runner.write_with_general( dedent( """ [storage foo] type = "filesystem" fileext = ".txt" path = "{foo}" [storage bar] type = "filesystem" fileext = ".txt" path = "{bar}" [pair foobar] a = "foo" b = "bar" collections = [ ["coll1", "coll_a1", "coll_b1"], "coll2" ] """ ).format(foo=str(foo), bar=str(bar)) ) result = runner.invoke(["discover"], input="y\n" * 6) assert not result.exception coll_a1 = foo.join("coll_a1") coll_b1 = bar.join("coll_b1") assert coll_a1.exists() assert coll_b1.exists() result = runner.invoke(["sync"]) assert not result.exception foo_txt = coll_a1.join("foo.txt") foo_txt.write("BEGIN:VCALENDAR\nUID:foo\nEND:VCALENDAR") result = runner.invoke(["sync"]) assert not result.exception assert foo_txt.exists() assert coll_b1.join("foo.txt").exists() def test_discover_direct_path(tmpdir, runner): foo = tmpdir.join("foo") bar = tmpdir.join("bar") runner.write_with_general( dedent( """ [storage foo] type = "filesystem" fileext = ".txt" path = "{foo}" [storage bar] type = "filesystem" fileext = ".txt" path = "{bar}" [pair foobar] a = "foo" b = "bar" collections = null """ ).format(foo=str(foo), bar=str(bar)) ) result = runner.invoke(["discover"], input="y\n" * 2) assert not result.exception result = runner.invoke(["sync"]) assert not result.exception assert foo.exists() assert bar.exists() def test_null_collection_with_named_collection(tmpdir, runner): runner.write_with_general( dedent( """ [pair foobar] a = "foo" b = "bar" collections = [["baz", "baz", null]] [storage foo] type = "filesystem" path = "{base}/foo/" fileext = ".txt" [storage bar] type = "singlefile" path = "{base}/bar.txt" """.format( base=str(tmpdir) ) ) ) result = runner.invoke(["discover"], input="y\n" * 2) assert not result.exception foo = tmpdir.join("foo") foobaz = foo.join("baz") assert foo.exists() assert foobaz.exists() bar = tmpdir.join("bar.txt") assert bar.exists() foobaz.join("lol.txt").write("BEGIN:VCARD\nUID:HAHA\nEND:VCARD") result = runner.invoke(["sync"]) assert not result.exception assert "HAHA" in bar.read() @pytest.mark.parametrize( "a_requires,b_requires", [ (True, True), (True, False), (False, True), (False, False), ], ) def test_collection_required(a_requires, b_requires, tmpdir, runner, monkeypatch): class TestStorage(Storage): storage_name = "test" def __init__(self, require_collection, **kw): if require_collection: assert not kw.get("collection") raise exceptions.CollectionRequired() from vdirsyncer.cli.utils import storage_names monkeypatch.setitem(storage_names._storages, "test", TestStorage) runner.write_with_general( dedent( """ [pair foobar] a = "foo" b = "bar" collections = null [storage foo] type = "test" require_collection = {a} [storage bar] type = "test" require_collection = {b} """.format( a=json.dumps(a_requires), b=json.dumps(b_requires) ) ) ) result = runner.invoke(["discover"]) if a_requires or b_requires: assert result.exception assert ( "One or more storages don't support `collections = null`." 
in result.output ) vdirsyncer-0.18.0/tests/system/cli/test_fetchparams.py000066400000000000000000000025211406140636100231340ustar00rootroot00000000000000from textwrap import dedent def test_get_password_from_command(tmpdir, runner): runner.write_with_general( dedent( """ [pair foobar] a = "foo" b = "bar" collections = ["a", "b", "c"] [storage foo] type = "filesystem" path = "{base}/foo/" fileext.fetch = ["command", "echo", ".txt"] [storage bar] type = "filesystem" path = "{base}/bar/" fileext.fetch = ["prompt", "Fileext for bar"] """.format( base=str(tmpdir) ) ) ) foo = tmpdir.ensure("foo", dir=True) foo.ensure("a", dir=True) foo.ensure("b", dir=True) foo.ensure("c", dir=True) bar = tmpdir.ensure("bar", dir=True) bar.ensure("a", dir=True) bar.ensure("b", dir=True) bar.ensure("c", dir=True) result = runner.invoke(["discover"], input=".asdf\n") assert not result.exception status = tmpdir.join("status").join("foobar.collections").read() assert "foo" in status assert "bar" in status assert "asdf" not in status assert "txt" not in status foo.join("a").join("foo.txt").write("BEGIN:VCARD\nUID:foo\nEND:VCARD") result = runner.invoke(["sync"], input=".asdf\n") assert not result.exception assert [x.basename for x in bar.join("a").listdir()] == ["foo.asdf"] vdirsyncer-0.18.0/tests/system/cli/test_repair.py000066400000000000000000000040171406140636100221230ustar00rootroot00000000000000from textwrap import dedent import pytest @pytest.fixture def storage(tmpdir, runner): runner.write_with_general( dedent( """ [storage foo] type = "filesystem" path = "{base}/foo/" fileext = ".txt" """ ).format(base=str(tmpdir)) ) return tmpdir.mkdir("foo") @pytest.mark.parametrize("collection", [None, "foocoll"]) def test_basic(storage, runner, collection): if collection is not None: storage = storage.mkdir(collection) collection_arg = f"foo/{collection}" else: collection_arg = "foo" argv = ["repair", collection_arg] result = runner.invoke(argv, input="y") assert not result.exception storage.join("item.txt").write("BEGIN:VCARD\nEND:VCARD") storage.join("toobroken.txt").write("") result = runner.invoke(argv, input="y") assert not result.exception assert "No UID" in result.output assert "'toobroken.txt' is malformed beyond repair" in result.output (new_fname,) = [x for x in storage.listdir() if "toobroken" not in str(x)] assert "UID:" in new_fname.read() @pytest.mark.parametrize("repair_uids", [None, True, False]) def test_repair_uids(storage, runner, repair_uids): f = storage.join("baduid.txt") orig_f = "BEGIN:VCARD\nUID:!!!!!\nEND:VCARD" f.write(orig_f) if repair_uids is None: opt = [] elif repair_uids: opt = ["--repair-unsafe-uid"] else: opt = ["--no-repair-unsafe-uid"] result = runner.invoke(["repair"] + opt + ["foo"], input="y") assert not result.exception if repair_uids: assert "UID or href is unsafe, assigning random UID" in result.output assert not f.exists() (new_f,) = storage.listdir() s = new_f.read() assert s.startswith("BEGIN:VCARD") assert s.endswith("END:VCARD") assert s != orig_f else: assert ( "UID may cause problems, add --repair-unsafe-uid to repair." 
in result.output ) assert f.read() == orig_f vdirsyncer-0.18.0/tests/system/cli/test_sync.py000066400000000000000000000344071406140636100216230ustar00rootroot00000000000000import json import sys from textwrap import dedent import pytest def test_simple_run(tmpdir, runner): runner.write_with_general( dedent( """ [pair my_pair] a = "my_a" b = "my_b" collections = null [storage my_a] type = "filesystem" path = "{0}/path_a/" fileext = ".txt" [storage my_b] type = "filesystem" path = "{0}/path_b/" fileext = ".txt" """ ).format(str(tmpdir)) ) tmpdir.mkdir("path_a") tmpdir.mkdir("path_b") result = runner.invoke(["discover"]) assert not result.exception result = runner.invoke(["sync"]) assert not result.exception tmpdir.join("path_a/haha.txt").write("UID:haha") result = runner.invoke(["sync"]) assert "Copying (uploading) item haha to my_b" in result.output assert tmpdir.join("path_b/haha.txt").read() == "UID:haha" def test_sync_inexistant_pair(tmpdir, runner): runner.write_with_general("") result = runner.invoke(["sync", "foo"]) assert result.exception assert "pair foo does not exist." in result.output.lower() def test_debug_connections(tmpdir, runner): runner.write_with_general( dedent( """ [pair my_pair] a = "my_a" b = "my_b" collections = null [storage my_a] type = "filesystem" path = "{0}/path_a/" fileext = ".txt" [storage my_b] type = "filesystem" path = "{0}/path_b/" fileext = ".txt" """ ).format(str(tmpdir)) ) tmpdir.mkdir("path_a") tmpdir.mkdir("path_b") result = runner.invoke(["discover"]) assert not result.exception result = runner.invoke(["-vdebug", "sync", "--max-workers=3"]) assert "using 3 maximal workers" in result.output.lower() result = runner.invoke(["-vdebug", "sync"]) assert "using 1 maximal workers" in result.output.lower() def test_empty_storage(tmpdir, runner): runner.write_with_general( dedent( """ [pair my_pair] a = "my_a" b = "my_b" collections = null [storage my_a] type = "filesystem" path = "{0}/path_a/" fileext = ".txt" [storage my_b] type = "filesystem" path = "{0}/path_b/" fileext = ".txt" """ ).format(str(tmpdir)) ) tmpdir.mkdir("path_a") tmpdir.mkdir("path_b") result = runner.invoke(["discover"]) assert not result.exception result = runner.invoke(["sync"]) assert not result.exception tmpdir.join("path_a/haha.txt").write("UID:haha") result = runner.invoke(["sync"]) assert not result.exception tmpdir.join("path_b/haha.txt").remove() result = runner.invoke(["sync"]) lines = result.output.splitlines() assert lines[0] == "Syncing my_pair" assert lines[1].startswith( "error: my_pair: " 'Storage "my_b" was completely emptied.' 
) assert result.exception def test_verbosity(tmpdir, runner): runner.write_with_general("") result = runner.invoke(["--verbosity=HAHA", "sync"]) assert result.exception assert ( 'invalid value for "--verbosity"' in result.output.lower() or "invalid value for '--verbosity'" in result.output.lower() ) def test_collections_cache_invalidation(tmpdir, runner): foo = tmpdir.mkdir("foo") bar = tmpdir.mkdir("bar") for x in "abc": foo.mkdir(x) bar.mkdir(x) runner.write_with_general( dedent( """ [storage foo] type = "filesystem" path = "{0}/foo/" fileext = ".txt" [storage bar] type = "filesystem" path = "{0}/bar/" fileext = ".txt" [pair foobar] a = "foo" b = "bar" collections = ["a", "b", "c"] """ ).format(str(tmpdir)) ) foo.join("a/itemone.txt").write("UID:itemone") result = runner.invoke(["discover"]) assert not result.exception result = runner.invoke(["sync"]) assert not result.exception assert "detected change in config file" not in result.output.lower() rv = bar.join("a").listdir() assert len(rv) == 1 assert rv[0].basename == "itemone.txt" runner.write_with_general( dedent( """ [storage foo] type = "filesystem" path = "{0}/foo/" fileext = ".txt" [storage bar] type = "filesystem" path = "{0}/bar2/" fileext = ".txt" [pair foobar] a = "foo" b = "bar" collections = ["a", "b", "c"] """ ).format(str(tmpdir)) ) for entry in tmpdir.join("status").listdir(): if not str(entry).endswith(".collections"): entry.remove() bar2 = tmpdir.mkdir("bar2") for x in "abc": bar2.mkdir(x) result = runner.invoke(["sync"]) assert "detected change in config file" in result.output.lower() assert result.exception result = runner.invoke(["discover"]) assert not result.exception result = runner.invoke(["sync"]) assert not result.exception rv = bar.join("a").listdir() rv2 = bar2.join("a").listdir() assert len(rv) == len(rv2) == 1 assert rv[0].basename == rv2[0].basename == "itemone.txt" def test_invalid_pairs_as_cli_arg(tmpdir, runner): runner.write_with_general( dedent( """ [storage foo] type = "filesystem" path = "{0}/foo/" fileext = ".txt" [storage bar] type = "filesystem" path = "{0}/bar/" fileext = ".txt" [pair foobar] a = "foo" b = "bar" collections = ["a", "b", "c"] """ ).format(str(tmpdir)) ) for base in ("foo", "bar"): base = tmpdir.mkdir(base) for c in "abc": base.mkdir(c) result = runner.invoke(["discover"]) assert not result.exception result = runner.invoke(["sync", "foobar/d"]) assert result.exception assert 'pair foobar: collection "d" not found' in result.output.lower() def test_multiple_pairs(tmpdir, runner): def get_cfg(): for name_a, name_b in ("foo", "bar"), ("bam", "baz"): yield dedent( """ [pair {a}{b}] a = "{a}" b = "{b}" collections = null """ ).format(a=name_a, b=name_b) for name in name_a, name_b: yield dedent( """ [storage {name}] type = "filesystem" path = "{path}" fileext = ".txt" """ ).format(name=name, path=str(tmpdir.mkdir(name))) runner.write_with_general("".join(get_cfg())) result = runner.invoke(["discover"]) assert not result.exception assert set(result.output.splitlines()) > { "Discovering collections for pair bambaz", "Discovering collections for pair foobar", } result = runner.invoke(["sync"]) assert not result.exception assert set(result.output.splitlines()) == { "Syncing bambaz", "Syncing foobar", } # XXX: https://github.com/pimutils/vdirsyncer/issues/617 @pytest.mark.skipif(sys.platform == "darwin", reason="This test inexplicably fails") @pytest.mark.parametrize( "collections", [ ("a", "A"), ("\ufffe",), ("Hello there!",), ("Österreich",), ("中国", "x1"), ("한글",), 
("42a4ec99-b1c2-4859-b142-759112f2ca50",), ("فلسطين",), ], ) def test_create_collections(collections, tmpdir, runner): runner.write_with_general( dedent( """ [pair foobar] a = "foo" b = "bar" collections = {colls} [storage foo] type = "filesystem" path = "{base}/foo/" fileext = ".txt" [storage bar] type = "filesystem" path = "{base}/bar/" fileext = ".txt" """.format( base=str(tmpdir), colls=json.dumps(list(collections)) ) ) ) result = runner.invoke(["discover"], input="y\n" * 2 * (len(collections) + 1)) assert not result.exception, result.output result = runner.invoke(["sync"] + ["foobar/" + x for x in collections]) assert not result.exception, result.output assert {x.basename for x in tmpdir.join("foo").listdir()} == { x.basename for x in tmpdir.join("bar").listdir() } def test_ident_conflict(tmpdir, runner): runner.write_with_general( dedent( """ [pair foobar] a = "foo" b = "bar" collections = null [storage foo] type = "filesystem" path = "{base}/foo/" fileext = ".txt" [storage bar] type = "filesystem" path = "{base}/bar/" fileext = ".txt" """.format( base=str(tmpdir) ) ) ) foo = tmpdir.mkdir("foo") tmpdir.mkdir("bar") foo.join("one.txt").write("UID:1") foo.join("two.txt").write("UID:1") foo.join("three.txt").write("UID:1") result = runner.invoke(["discover"]) assert not result.exception result = runner.invoke(["sync"]) assert result.exception assert ( 'error: foobar: Storage "foo" contains multiple items with the ' "same UID or even content" ) in result.output assert ( sorted( [ "one.txt" in result.output, "two.txt" in result.output, "three.txt" in result.output, ] ) == [False, True, True] ) @pytest.mark.parametrize( "existing,missing", [ ("foo", "bar"), ("bar", "foo"), ], ) def test_unknown_storage(tmpdir, runner, existing, missing): runner.write_with_general( dedent( """ [pair foobar] a = "foo" b = "bar" collections = null [storage {existing}] type = "filesystem" path = "{base}/{existing}/" fileext = ".txt" """.format( base=str(tmpdir), existing=existing ) ) ) tmpdir.mkdir(existing) result = runner.invoke(["discover"]) assert result.exception assert ( "Storage '{missing}' not found. 
" "These are the configured storages: ['{existing}']".format( missing=missing, existing=existing ) ) in result.output @pytest.mark.parametrize("cmd", ["sync", "metasync"]) def test_no_configured_pairs(tmpdir, runner, cmd): runner.write_with_general("") result = runner.invoke([cmd]) assert result.output == "critical: Nothing to do.\n" assert result.exception.code == 5 @pytest.mark.parametrize( "resolution,expect_foo,expect_bar", [(["command", "cp"], "UID:lol\nfööcontent", "UID:lol\nfööcontent")], ) def test_conflict_resolution(tmpdir, runner, resolution, expect_foo, expect_bar): runner.write_with_general( dedent( """ [pair foobar] a = "foo" b = "bar" collections = null conflict_resolution = {val} [storage foo] type = "filesystem" fileext = ".txt" path = "{base}/foo" [storage bar] type = "filesystem" fileext = ".txt" path = "{base}/bar" """.format( base=str(tmpdir), val=json.dumps(resolution) ) ) ) foo = tmpdir.join("foo") bar = tmpdir.join("bar") fooitem = foo.join("lol.txt").ensure() fooitem.write("UID:lol\nfööcontent") baritem = bar.join("lol.txt").ensure() baritem.write("UID:lol\nbööcontent") r = runner.invoke(["discover"]) assert not r.exception r = runner.invoke(["sync"]) assert not r.exception assert fooitem.read() == expect_foo assert baritem.read() == expect_bar @pytest.mark.parametrize("partial_sync", ["error", "ignore", "revert", None]) def test_partial_sync(tmpdir, runner, partial_sync): runner.write_with_general( dedent( """ [pair foobar] a = "foo" b = "bar" collections = null {partial_sync} [storage foo] type = "filesystem" fileext = ".txt" path = "{base}/foo" [storage bar] type = "filesystem" read_only = true fileext = ".txt" path = "{base}/bar" """.format( partial_sync=( f'partial_sync = "{partial_sync}"\n' if partial_sync else "" ), base=str(tmpdir), ) ) ) foo = tmpdir.mkdir("foo") bar = tmpdir.mkdir("bar") foo.join("other.txt").write("UID:other") bar.join("other.txt").write("UID:other") baritem = bar.join("lol.txt") baritem.write("UID:lol") r = runner.invoke(["discover"]) assert not r.exception r = runner.invoke(["sync"]) assert not r.exception fooitem = foo.join("lol.txt") fooitem.remove() r = runner.invoke(["sync"]) if partial_sync == "error": assert r.exception assert "Attempted change" in r.output elif partial_sync == "ignore": assert baritem.exists() r = runner.invoke(["sync"]) assert not r.exception assert baritem.exists() else: assert baritem.exists() r = runner.invoke(["sync"]) assert not r.exception assert baritem.exists() assert fooitem.exists() def test_fetch_only_necessary_params(tmpdir, runner): fetched_file = tmpdir.join("fetched_flag") fetch_script = tmpdir.join("fetch_script") fetch_script.write( dedent( """ set -e touch "{}" echo ".txt" """.format( str(fetched_file) ) ) ) runner.write_with_general( dedent( """ [pair foobar] a = "foo" b = "bar" collections = null [pair bambar] a = "bam" b = "bar" collections = null [storage foo] type = "filesystem" path = "{path}" fileext = ".txt" [storage bar] type = "filesystem" path = "{path}" fileext = ".txt" [storage bam] type = "filesystem" path = "{path}" fileext.fetch = ["command", "sh", "{script}"] """.format( path=str(tmpdir.mkdir("bogus")), script=str(fetch_script) ) ) ) def fetched(): try: fetched_file.remove() return True except Exception: return False r = runner.invoke(["discover"]) assert not r.exception assert fetched() r = runner.invoke(["sync", "foobar"]) assert not r.exception assert not fetched() r = runner.invoke(["sync"]) assert not r.exception assert fetched() r = runner.invoke(["sync", "bambar"]) 
assert not r.exception assert fetched() vdirsyncer-0.18.0/tests/system/cli/test_utils.py000066400000000000000000000014251406140636100220010ustar00rootroot00000000000000from vdirsyncer import exceptions from vdirsyncer.cli.utils import handle_cli_error from vdirsyncer.cli.utils import storage_instance_from_config from vdirsyncer.cli.utils import storage_names def test_handle_cli_error(capsys): try: raise exceptions.InvalidResponse("ayy lmao") except BaseException: handle_cli_error() out, err = capsys.readouterr() assert "returned something vdirsyncer doesn't understand" in err assert "ayy lmao" in err def test_storage_instance_from_config(monkeypatch): def lol(**kw): assert kw == {"foo": "bar", "baz": 1} return "OK" monkeypatch.setitem(storage_names._storages, "lol", lol) config = {"type": "lol", "foo": "bar", "baz": 1} assert storage_instance_from_config(config) == "OK" vdirsyncer-0.18.0/tests/system/utils/000077500000000000000000000000001406140636100176175ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/system/utils/test_main.py000066400000000000000000000046571406140636100221700ustar00rootroot00000000000000import logging import sys import click_log import pytest import requests from vdirsyncer import http from vdirsyncer import utils @pytest.fixture(autouse=True) def no_debug_output(request): logger = click_log.basic_config("vdirsyncer") logger.setLevel(logging.WARNING) def test_get_storage_init_args(): from vdirsyncer.storage.memory import MemoryStorage all, required = utils.get_storage_init_args(MemoryStorage) assert all == {"fileext", "collection", "read_only", "instance_name"} assert not required def test_request_ssl(): with pytest.raises(requests.exceptions.ConnectionError) as excinfo: http.request("GET", "https://self-signed.badssl.com/") assert "certificate verify failed" in str(excinfo.value) http.request("GET", "https://self-signed.badssl.com/", verify=False) def _fingerprints_broken(): from pkg_resources import parse_version as ver broken_urllib3 = ver(requests.__version__) <= ver("2.5.1") return broken_urllib3 @pytest.mark.skipif( _fingerprints_broken(), reason="https://github.com/shazow/urllib3/issues/529" ) @pytest.mark.parametrize( "fingerprint", [ "94:FD:7A:CB:50:75:A4:69:82:0A:F8:23:DF:07:FC:69:3E:CD:90:CA", "19:90:F7:23:94:F2:EF:AB:2B:64:2D:57:3D:25:95:2D", ], ) def test_request_ssl_fingerprints(httpsserver, fingerprint): httpsserver.serve_content("") # we need to serve something http.request("GET", httpsserver.url, verify=False, verify_fingerprint=fingerprint) with pytest.raises(requests.exceptions.ConnectionError) as excinfo: http.request("GET", httpsserver.url, verify_fingerprint=fingerprint) with pytest.raises(requests.exceptions.ConnectionError) as excinfo: http.request( "GET", httpsserver.url, verify=False, verify_fingerprint="".join(reversed(fingerprint)), ) assert "Fingerprints did not match" in str(excinfo.value) def test_open_graphical_browser(monkeypatch): import webbrowser # Just assert that this internal attribute still exists and behaves the way # expected if sys.version_info < (3, 7): iter(webbrowser._tryorder) else: assert webbrowser._tryorder is None monkeypatch.setattr("webbrowser._tryorder", []) with pytest.raises(RuntimeError) as excinfo: utils.open_graphical_browser("http://example.com") assert "No graphical browser found" in str(excinfo.value) 
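# A minimal usage sketch (not part of the test suite), assuming only the
# request() signature exercised above: pinning a server by certificate
# fingerprint instead of relying on CA verification. The URL is
# hypothetical.
#
#     from vdirsyncer import http
#     http.request(
#         "GET",
#         "https://example.com/",
#         verify=False,
#         verify_fingerprint="19:90:F7:23:94:F2:EF:AB:2B:64:2D:57:3D:25:95:2D",
#     )
#
# A certificate that doesn't match raises a requests ConnectionError whose
# message contains "Fingerprints did not match".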
vdirsyncer-0.18.0/tests/unit/000077500000000000000000000000001406140636100161125ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/unit/cli/000077500000000000000000000000001406140636100166615ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/unit/cli/test_config.py000066400000000000000000000012641406140636100215420ustar00rootroot00000000000000import os from vdirsyncer.cli.config import _resolve_conflict_via_command from vdirsyncer.vobject import Item def test_conflict_resolution_command(): def check_call(command): command, a_tmp, b_tmp = command assert command == os.path.expanduser("~/command") with open(a_tmp) as f: assert f.read() == a.raw with open(b_tmp) as f: assert f.read() == b.raw with open(b_tmp, "w") as f: f.write(a.raw) a = Item("UID:AAAAAAA") b = Item("UID:BBBBBBB") assert ( _resolve_conflict_via_command( a, b, ["~/command"], "a", "b", _check_call=check_call ).raw == a.raw ) vdirsyncer-0.18.0/tests/unit/cli/test_discover.py000066400000000000000000000117061406140636100221150ustar00rootroot00000000000000import pytest from vdirsyncer.cli.discover import expand_collections missing = object() @pytest.mark.parametrize( "shortcuts,expected", [ ( ["from a"], [ ( "c1", ( {"type": "fooboo", "custom_arg": "a1", "collection": "c1"}, {"type": "fooboo", "custom_arg": "b1", "collection": "c1"}, ), ), ( "c2", ( {"type": "fooboo", "custom_arg": "a2", "collection": "c2"}, {"type": "fooboo", "custom_arg": "b2", "collection": "c2"}, ), ), ( "a3", ( {"type": "fooboo", "custom_arg": "a3", "collection": "a3"}, missing, ), ), ], ), ( ["from b"], [ ( "c1", ( {"type": "fooboo", "custom_arg": "a1", "collection": "c1"}, {"type": "fooboo", "custom_arg": "b1", "collection": "c1"}, ), ), ( "c2", ( {"type": "fooboo", "custom_arg": "a2", "collection": "c2"}, {"type": "fooboo", "custom_arg": "b2", "collection": "c2"}, ), ), ( "b3", ( missing, {"type": "fooboo", "custom_arg": "b3", "collection": "b3"}, ), ), ], ), ( ["from a", "from b"], [ ( "c1", ( {"type": "fooboo", "custom_arg": "a1", "collection": "c1"}, {"type": "fooboo", "custom_arg": "b1", "collection": "c1"}, ), ), ( "c2", ( {"type": "fooboo", "custom_arg": "a2", "collection": "c2"}, {"type": "fooboo", "custom_arg": "b2", "collection": "c2"}, ), ), ( "a3", ( {"type": "fooboo", "custom_arg": "a3", "collection": "a3"}, missing, ), ), ( "b3", ( missing, {"type": "fooboo", "custom_arg": "b3", "collection": "b3"}, ), ), ], ), ( [["c12", "c1", "c2"]], [ ( "c12", ( {"type": "fooboo", "custom_arg": "a1", "collection": "c1"}, {"type": "fooboo", "custom_arg": "b2", "collection": "c2"}, ), ), ], ), ( None, [ ( None, ( {"type": "fooboo", "storage_side": "a", "collection": None}, {"type": "fooboo", "storage_side": "b", "collection": None}, ), ) ], ), ( [None], [ ( None, ( {"type": "fooboo", "storage_side": "a", "collection": None}, {"type": "fooboo", "storage_side": "b", "collection": None}, ), ) ], ), ], ) def test_expand_collections(shortcuts, expected): config_a = {"type": "fooboo", "storage_side": "a"} config_b = {"type": "fooboo", "storage_side": "b"} def get_discovered_a(): return { "c1": {"type": "fooboo", "custom_arg": "a1", "collection": "c1"}, "c2": {"type": "fooboo", "custom_arg": "a2", "collection": "c2"}, "a3": {"type": "fooboo", "custom_arg": "a3", "collection": "a3"}, } def get_discovered_b(): return { "c1": {"type": "fooboo", "custom_arg": "b1", "collection": "c1"}, "c2": {"type": "fooboo", "custom_arg": "b2", "collection": "c2"}, "b3": {"type": "fooboo", "custom_arg": "b3", "collection": "b3"}, } assert ( sorted( expand_collections( shortcuts, 
config_a, config_b, get_discovered_a, get_discovered_b, lambda config, collection: missing, ) ) == sorted(expected) ) vdirsyncer-0.18.0/tests/unit/cli/test_fetchparams.py000066400000000000000000000062631406140636100225760ustar00rootroot00000000000000from contextlib import contextmanager from unittest.mock import patch import hypothesis.strategies as st import pytest from hypothesis import given from vdirsyncer import exceptions from vdirsyncer.cli.fetchparams import expand_fetch_params from vdirsyncer.cli.fetchparams import STRATEGIES @pytest.fixture def mystrategy(monkeypatch): def strategy(x): calls.append(x) return x calls = [] monkeypatch.setitem(STRATEGIES, "mystrategy", strategy) return calls @contextmanager def dummy_strategy(): def strategy(x): calls.append(x) return x calls = [] with patch.dict(STRATEGIES, {"mystrategy": strategy}): yield calls @pytest.fixture def value_cache(monkeypatch): _cache = {} class FakeContext: fetched_params = _cache def find_object(self, _): return self def get_context(*a, **kw): return FakeContext() monkeypatch.setattr("click.get_current_context", get_context) return _cache def test_key_conflict(monkeypatch, mystrategy): with pytest.raises(ValueError) as excinfo: expand_fetch_params({"foo": "bar", "foo.fetch": ["mystrategy", "baz"]}) assert "Can't set foo.fetch and foo." in str(excinfo.value) @given(s=st.text(), t=st.text(min_size=1)) def test_fuzzing(s, t): with dummy_strategy(): config = expand_fetch_params({f"{s}.fetch": ["mystrategy", t]}) assert config[s] == t @pytest.mark.parametrize("value", [[], "lol", 42]) def test_invalid_fetch_value(mystrategy, value): with pytest.raises(ValueError) as excinfo: expand_fetch_params({"foo.fetch": value}) assert "Expected a list" in str( excinfo.value ) or "Expected list of length > 0" in str(excinfo.value) def test_unknown_strategy(): with pytest.raises(exceptions.UserError) as excinfo: expand_fetch_params({"foo.fetch": ["unreal", "asdf"]}) assert "Unknown strategy" in str(excinfo.value) def test_caching(monkeypatch, mystrategy, value_cache): orig_cfg = {"foo.fetch": ["mystrategy", "asdf"]} rv = expand_fetch_params(orig_cfg) assert rv["foo"] == "asdf" assert mystrategy == ["asdf"] assert len(value_cache) == 1 rv = expand_fetch_params(orig_cfg) assert rv["foo"] == "asdf" assert mystrategy == ["asdf"] assert len(value_cache) == 1 value_cache.clear() rv = expand_fetch_params(orig_cfg) assert rv["foo"] == "asdf" assert mystrategy == ["asdf"] * 2 assert len(value_cache) == 1 def test_failed_strategy(monkeypatch, value_cache): calls = [] def strategy(x): calls.append(x) raise KeyboardInterrupt() monkeypatch.setitem(STRATEGIES, "mystrategy", strategy) orig_cfg = {"foo.fetch": ["mystrategy", "asdf"]} for _ in range(2): with pytest.raises(KeyboardInterrupt): expand_fetch_params(orig_cfg) assert len(value_cache) == 1 assert len(calls) == 1 def test_empty_value(monkeypatch, mystrategy): with pytest.raises(exceptions.UserError) as excinfo: expand_fetch_params({"foo.fetch": ["mystrategy", ""]}) assert "Empty value for foo.fetch, this most likely indicates an error" in str( excinfo.value ) vdirsyncer-0.18.0/tests/unit/sync/000077500000000000000000000000001406140636100170665ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/unit/sync/test_status.py000066400000000000000000000021561406140636100220260ustar00rootroot00000000000000import hypothesis.strategies as st from hypothesis import assume from hypothesis import given from vdirsyncer.sync.status import SqliteStatus status_dict_strategy = st.dictionaries( st.text(), 
st.tuples( *( st.fixed_dictionaries( {"href": st.text(), "hash": st.text(), "etag": st.text()} ) for _ in range(2) ) ), ) @given(status_dict=status_dict_strategy) def test_legacy_status(status_dict): hrefs_a = {meta_a["href"] for meta_a, meta_b in status_dict.values()} hrefs_b = {meta_b["href"] for meta_a, meta_b in status_dict.values()} assume(len(hrefs_a) == len(status_dict) == len(hrefs_b)) status = SqliteStatus() status.load_legacy_status(status_dict) assert dict(status.to_legacy_status()) == status_dict for ident, (meta_a, meta_b) in status_dict.items(): ident_a, meta2_a = status.get_by_href_a(meta_a["href"]) ident_b, meta2_b = status.get_by_href_b(meta_b["href"]) assert meta2_a.to_status() == meta_a assert meta2_b.to_status() == meta_b assert ident_a == ident_b == ident vdirsyncer-0.18.0/tests/unit/sync/test_sync.py000066400000000000000000000442571406140636100214670ustar00rootroot00000000000000from copy import deepcopy import hypothesis.strategies as st import pytest from hypothesis import assume from hypothesis.stateful import Bundle from hypothesis.stateful import rule from hypothesis.stateful import RuleBasedStateMachine from tests import blow_up from tests import uid_strategy from vdirsyncer.storage.memory import _random_string from vdirsyncer.storage.memory import MemoryStorage from vdirsyncer.sync import sync as _sync from vdirsyncer.sync.exceptions import BothReadOnly from vdirsyncer.sync.exceptions import IdentConflict from vdirsyncer.sync.exceptions import PartialSync from vdirsyncer.sync.exceptions import StorageEmpty from vdirsyncer.sync.exceptions import SyncConflict from vdirsyncer.sync.status import SqliteStatus from vdirsyncer.vobject import Item def sync(a, b, status, *args, **kwargs): new_status = SqliteStatus(":memory:") new_status.load_legacy_status(status) rv = _sync(a, b, new_status, *args, **kwargs) status.clear() status.update(new_status.to_legacy_status()) return rv def empty_storage(x): return list(x.list()) == [] def items(s): return {x[1].raw for x in s.items.values()} def test_irrelevant_status(): a = MemoryStorage() b = MemoryStorage() status = {"1": ("1", 1234, "1.ics", 2345)} sync(a, b, status) assert not status assert not items(a) assert not items(b) def test_missing_status(): a = MemoryStorage() b = MemoryStorage() status = {} item = Item("asdf") a.upload(item) b.upload(item) sync(a, b, status) assert len(status) == 1 assert items(a) == items(b) == {item.raw} def test_missing_status_and_different_items(): a = MemoryStorage() b = MemoryStorage() status = {} item1 = Item("UID:1\nhaha") item2 = Item("UID:1\nhoho") a.upload(item1) b.upload(item2) with pytest.raises(SyncConflict): sync(a, b, status) assert not status sync(a, b, status, conflict_resolution="a wins") assert items(a) == items(b) == {item1.raw} def test_read_only_and_prefetch(): a = MemoryStorage() b = MemoryStorage() b.read_only = True status = {} item1 = Item("UID:1\nhaha") item2 = Item("UID:2\nhoho") a.upload(item1) a.upload(item2) sync(a, b, status, force_delete=True) sync(a, b, status, force_delete=True) assert not items(a) and not items(b) def test_partial_sync_error(): a = MemoryStorage() b = MemoryStorage() status = {} a.upload(Item("UID:0")) b.read_only = True with pytest.raises(PartialSync): sync(a, b, status, partial_sync="error") def test_partial_sync_ignore(): a = MemoryStorage() b = MemoryStorage() status = {} item0 = Item("UID:0\nhehe") a.upload(item0) b.upload(item0) b.read_only = True item1 = Item("UID:1\nhaha") a.upload(item1) sync(a, b, status, partial_sync="ignore") sync(a, 
b, status, partial_sync="ignore") assert items(a) == {item0.raw, item1.raw} assert items(b) == {item0.raw} def test_partial_sync_ignore2(): a = MemoryStorage() b = MemoryStorage() status = {} href, etag = a.upload(Item("UID:0")) a.read_only = True sync(a, b, status, partial_sync="ignore", force_delete=True) assert items(b) == items(a) == {"UID:0"} b.items.clear() sync(a, b, status, partial_sync="ignore", force_delete=True) sync(a, b, status, partial_sync="ignore", force_delete=True) assert items(a) == {"UID:0"} assert not b.items a.read_only = False a.update(href, Item("UID:0\nupdated"), etag) a.read_only = True sync(a, b, status, partial_sync="ignore", force_delete=True) assert items(b) == items(a) == {"UID:0\nupdated"} def test_upload_and_update(): a = MemoryStorage(fileext=".a") b = MemoryStorage(fileext=".b") status = {} item = Item("UID:1") # new item 1 in a a.upload(item) sync(a, b, status) assert items(b) == items(a) == {item.raw} item = Item("UID:1\nASDF:YES") # update of item 1 in b b.update("1.b", item, b.get("1.b")[1]) sync(a, b, status) assert items(b) == items(a) == {item.raw} item2 = Item("UID:2") # new item 2 in b b.upload(item2) sync(a, b, status) assert items(b) == items(a) == {item.raw, item2.raw} item2 = Item("UID:2\nASDF:YES") # update of item 2 in a a.update("2.a", item2, a.get("2.a")[1]) sync(a, b, status) assert items(b) == items(a) == {item.raw, item2.raw} def test_deletion(): a = MemoryStorage(fileext=".a") b = MemoryStorage(fileext=".b") status = {} item = Item("UID:1") a.upload(item) item2 = Item("UID:2") a.upload(item2) sync(a, b, status) b.delete("1.b", b.get("1.b")[1]) sync(a, b, status) assert items(a) == items(b) == {item2.raw} a.upload(item) sync(a, b, status) assert items(a) == items(b) == {item.raw, item2.raw} a.delete("1.a", a.get("1.a")[1]) sync(a, b, status) assert items(a) == items(b) == {item2.raw} def test_insert_hash(): a = MemoryStorage() b = MemoryStorage() status = {} item = Item("UID:1") href, etag = a.upload(item) sync(a, b, status) for d in status["1"]: del d["hash"] a.update(href, Item("UID:1\nHAHA:YES"), etag) sync(a, b, status) assert "hash" in status["1"][0] and "hash" in status["1"][1] def test_already_synced(): a = MemoryStorage(fileext=".a") b = MemoryStorage(fileext=".b") item = Item("UID:1") a.upload(item) b.upload(item) status = { "1": ( {"href": "1.a", "hash": item.hash, "etag": a.get("1.a")[1]}, {"href": "1.b", "hash": item.hash, "etag": b.get("1.b")[1]}, ) } old_status = deepcopy(status) a.update = b.update = a.upload = b.upload = lambda *a, **kw: pytest.fail( "Method shouldn't have been called." 
) for _ in (1, 2): sync(a, b, status) assert status == old_status assert items(a) == items(b) == {item.raw} @pytest.mark.parametrize("winning_storage", "ab") def test_conflict_resolution_both_etags_new(winning_storage): a = MemoryStorage() b = MemoryStorage() item = Item("UID:1") href_a, etag_a = a.upload(item) href_b, etag_b = b.upload(item) status = {} sync(a, b, status) assert status item_a = Item("UID:1\nitem a") item_b = Item("UID:1\nitem b") a.update(href_a, item_a, etag_a) b.update(href_b, item_b, etag_b) with pytest.raises(SyncConflict): sync(a, b, status) sync(a, b, status, conflict_resolution=f"{winning_storage} wins") assert ( items(a) == items(b) == {item_a.raw if winning_storage == "a" else item_b.raw} ) def test_updated_and_deleted(): a = MemoryStorage() b = MemoryStorage() href_a, etag_a = a.upload(Item("UID:1")) status = {} sync(a, b, status, force_delete=True) ((href_b, etag_b),) = b.list() b.delete(href_b, etag_b) updated = Item("UID:1\nupdated") a.update(href_a, updated, etag_a) sync(a, b, status, force_delete=True) assert items(a) == items(b) == {updated.raw} def test_conflict_resolution_invalid_mode(): a = MemoryStorage() b = MemoryStorage() item_a = Item("UID:1\nitem a") item_b = Item("UID:1\nitem b") a.upload(item_a) b.upload(item_b) with pytest.raises(ValueError): sync(a, b, {}, conflict_resolution="yolo") def test_conflict_resolution_new_etags_without_changes(): a = MemoryStorage() b = MemoryStorage() item = Item("UID:1") href_a, etag_a = a.upload(item) href_b, etag_b = b.upload(item) status = {"1": (href_a, "BOGUS_a", href_b, "BOGUS_b")} sync(a, b, status) ((ident, (status_a, status_b)),) = status.items() assert ident == "1" assert status_a["href"] == href_a assert status_a["etag"] == etag_a assert status_b["href"] == href_b assert status_b["etag"] == etag_b def test_uses_get_multi(monkeypatch): def breakdown(*a, **kw): raise AssertionError("Expected use of get_multi") get_multi_calls = [] old_get = MemoryStorage.get def get_multi(self, hrefs): hrefs = list(hrefs) get_multi_calls.append(hrefs) for href in hrefs: item, etag = old_get(self, href) yield href, item, etag monkeypatch.setattr(MemoryStorage, "get", breakdown) monkeypatch.setattr(MemoryStorage, "get_multi", get_multi) a = MemoryStorage() b = MemoryStorage() item = Item("UID:1") expected_href, etag = a.upload(item) sync(a, b, {}) assert get_multi_calls == [[expected_href]] def test_empty_storage_dataloss(): a = MemoryStorage() b = MemoryStorage() a.upload(Item("UID:1")) a.upload(Item("UID:2")) status = {} sync(a, b, status) with pytest.raises(StorageEmpty): sync(MemoryStorage(), b, status) with pytest.raises(StorageEmpty): sync(a, MemoryStorage(), status) def test_no_uids(): a = MemoryStorage() b = MemoryStorage() a.upload(Item("ASDF")) b.upload(Item("FOOBAR")) status = {} sync(a, b, status) assert items(a) == items(b) == {"ASDF", "FOOBAR"} def test_changed_uids(): a = MemoryStorage() b = MemoryStorage() href_a, etag_a = a.upload(Item("UID:A-ONE")) href_b, etag_b = b.upload(Item("UID:B-ONE")) status = {} sync(a, b, status) a.update(href_a, Item("UID:A-TWO"), etag_a) sync(a, b, status) def test_both_readonly(): a = MemoryStorage(read_only=True) b = MemoryStorage(read_only=True) assert a.read_only assert b.read_only status = {} with pytest.raises(BothReadOnly): sync(a, b, status) def test_partial_sync_revert(): a = MemoryStorage(instance_name="a") b = MemoryStorage(instance_name="b") status = {} a.upload(Item("UID:1")) b.upload(Item("UID:2")) b.read_only = True sync(a, b, status, partial_sync="revert") 
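    # First pass: b is read-only, so "UID:2" is copied over into a, while
    # a's own "UID:1" cannot be uploaded to b; reverting it (deleting it
    # from a again) only happens on the following sync run.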
assert len(status) == 2 assert items(a) == {"UID:1", "UID:2"} assert items(b) == {"UID:2"} sync(a, b, status, partial_sync="revert") assert len(status) == 1 assert items(a) == {"UID:2"} assert items(b) == {"UID:2"} # Check that updates get reverted a.items[next(iter(a.items))] = ("foo", Item("UID:2\nupdated")) assert items(a) == {"UID:2\nupdated"} sync(a, b, status, partial_sync="revert") assert len(status) == 1 assert items(a) == {"UID:2\nupdated"} sync(a, b, status, partial_sync="revert") assert items(a) == {"UID:2"} # Check that deletions get reverted a.items.clear() sync(a, b, status, partial_sync="revert", force_delete=True) sync(a, b, status, partial_sync="revert", force_delete=True) assert items(a) == {"UID:2"} @pytest.mark.parametrize("sync_inbetween", (True, False)) def test_ident_conflict(sync_inbetween): a = MemoryStorage() b = MemoryStorage() status = {} href_a, etag_a = a.upload(Item("UID:aaa")) href_b, etag_b = a.upload(Item("UID:bbb")) if sync_inbetween: sync(a, b, status) a.update(href_a, Item("UID:xxx"), etag_a) a.update(href_b, Item("UID:xxx"), etag_b) with pytest.raises(IdentConflict): sync(a, b, status) def test_moved_href(): """ Concrete application: ppl_ stores contact aliases in filenames, which means item's hrefs get changed. Vdirsyncer doesn't synchronize this data, but also shouldn't do things like deleting and re-uploading to the server. .. _ppl: http://ppladdressbook.org/ """ a = MemoryStorage() b = MemoryStorage() status = {} href, etag = a.upload(Item("UID:haha")) sync(a, b, status) b.items["lol"] = b.items.pop("haha") # The sync algorithm should prefetch `lol`, see that it's the same ident # and not do anything else. a.get_multi = blow_up # Absolutely no prefetch on A # No actual sync actions a.delete = a.update = a.upload = b.delete = b.update = b.upload = blow_up sync(a, b, status) assert len(status) == 1 assert items(a) == items(b) == {"UID:haha"} assert status["haha"][1]["href"] == "lol" old_status = deepcopy(status) # Further sync should be a noop. Not even prefetching should occur. b.get_multi = blow_up sync(a, b, status) assert old_status == status assert items(a) == items(b) == {"UID:haha"} def test_bogus_etag_change(): """Assert that sync algorithm is resilient against etag changes if content didn\'t change. In this particular case we test a scenario where both etags have been updated, but only one side actually changed its item content. 
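    The storage that already holds the newer content must not receive any
    upload, update or delete calls at all.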
""" a = MemoryStorage() b = MemoryStorage() status = {} href_a, etag_a = a.upload(Item("UID:ASDASD")) sync(a, b, status) assert len(status) == len(list(a.list())) == len(list(b.list())) == 1 ((href_b, etag_b),) = b.list() a.update(href_a, Item("UID:ASDASD"), etag_a) b.update(href_b, Item("UID:ASDASD\nACTUALCHANGE:YES"), etag_b) b.delete = b.update = b.upload = blow_up sync(a, b, status) assert len(status) == 1 assert items(a) == items(b) == {"UID:ASDASD\nACTUALCHANGE:YES"} def test_unicode_hrefs(): a = MemoryStorage() b = MemoryStorage() status = {} href, etag = a.upload(Item("UID:äää")) sync(a, b, status) class ActionIntentionallyFailed(Exception): pass def action_failure(*a, **kw): raise ActionIntentionallyFailed() class SyncMachine(RuleBasedStateMachine): Status = Bundle("status") Storage = Bundle("storage") @rule(target=Storage, flaky_etags=st.booleans(), null_etag_on_upload=st.booleans()) def newstorage(self, flaky_etags, null_etag_on_upload): s = MemoryStorage() if flaky_etags: def get(href): old_etag, item = s.items[href] etag = _random_string() s.items[href] = etag, item return item, etag s.get = get if null_etag_on_upload: _old_upload = s.upload _old_update = s.update s.upload = lambda item: (_old_upload(item)[0], "NULL") s.update = lambda h, i, e: _old_update(h, i, e) and "NULL" return s @rule(s=Storage, read_only=st.booleans()) def is_read_only(self, s, read_only): assume(s.read_only != read_only) s.read_only = read_only @rule(s=Storage) def actions_fail(self, s): s.upload = action_failure s.update = action_failure s.delete = action_failure @rule(s=Storage) def none_as_etag(self, s): _old_upload = s.upload _old_update = s.update def upload(item): return _old_upload(item)[0], None def update(href, item, etag): _old_update(href, item, etag) s.upload = upload s.update = update @rule(target=Status) def newstatus(self): return {} @rule(storage=Storage, uid=uid_strategy, etag=st.text()) def upload(self, storage, uid, etag): item = Item(f"UID:{uid}") storage.items[uid] = (etag, item) @rule(storage=Storage, href=st.text()) def delete(self, storage, href): assume(storage.items.pop(href, None)) @rule( status=Status, a=Storage, b=Storage, force_delete=st.booleans(), conflict_resolution=st.one_of((st.just("a wins"), st.just("b wins"))), with_error_callback=st.booleans(), partial_sync=st.one_of( (st.just("ignore"), st.just("revert"), st.just("error")) ), ) def sync( self, status, a, b, force_delete, conflict_resolution, with_error_callback, partial_sync, ): assume(a is not b) old_items_a = items(a) old_items_b = items(b) a.instance_name = "a" b.instance_name = "b" errors = [] if with_error_callback: error_callback = errors.append else: error_callback = None try: # If one storage is read-only, double-sync because changes don't # get reverted immediately. 
for _ in range(2 if a.read_only or b.read_only else 1): sync( a, b, status, force_delete=force_delete, conflict_resolution=conflict_resolution, error_callback=error_callback, partial_sync=partial_sync, ) for e in errors: raise e except PartialSync: assert partial_sync == "error" except ActionIntentionallyFailed: pass except BothReadOnly: assert a.read_only and b.read_only assume(False) except StorageEmpty: if force_delete: raise else: assert not list(a.list()) or not list(b.list()) else: items_a = items(a) items_b = items(b) assert items_a == items_b or partial_sync == "ignore" assert items_a == old_items_a or not a.read_only assert items_b == old_items_b or not b.read_only assert ( set(a.items) | set(b.items) == set(status) or partial_sync == "ignore" ) TestSyncMachine = SyncMachine.TestCase @pytest.mark.parametrize("error_callback", [True, False]) def test_rollback(error_callback): a = MemoryStorage() b = MemoryStorage() status = {} a.items["0"] = ("", Item("UID:0")) b.items["1"] = ("", Item("UID:1")) b.upload = b.update = b.delete = action_failure if error_callback: errors = [] sync( a, b, status=status, conflict_resolution="a wins", error_callback=errors.append, ) assert len(errors) == 1 assert isinstance(errors[0], ActionIntentionallyFailed) assert len(status) == 1 assert status["1"] else: with pytest.raises(ActionIntentionallyFailed): sync(a, b, status=status, conflict_resolution="a wins") def test_duplicate_hrefs(): a = MemoryStorage() b = MemoryStorage() a.list = lambda: [("a", "a")] * 3 a.items["a"] = ("a", Item("UID:a")) status = {} sync(a, b, status) with pytest.raises(AssertionError): sync(a, b, status) vdirsyncer-0.18.0/tests/unit/test_exceptions.py000066400000000000000000000005121406140636100217020ustar00rootroot00000000000000from vdirsyncer import exceptions def test_user_error_problems(): e = exceptions.UserError( "A few problems occurred", problems=["Problem one", "Problem two", "Problem three"], ) assert "one" in str(e) assert "two" in str(e) assert "three" in str(e) assert "problems occurred" in str(e) vdirsyncer-0.18.0/tests/unit/test_metasync.py000066400000000000000000000106401406140636100213470ustar00rootroot00000000000000import hypothesis.strategies as st import pytest from hypothesis import example from hypothesis import given from tests import blow_up from vdirsyncer.exceptions import UserError from vdirsyncer.metasync import logger from vdirsyncer.metasync import metasync from vdirsyncer.metasync import MetaSyncConflict from vdirsyncer.storage.base import normalize_meta_value from vdirsyncer.storage.memory import MemoryStorage def test_irrelevant_status(): a = MemoryStorage() b = MemoryStorage() status = {"foo": "bar"} metasync(a, b, status, keys=()) assert not status def test_basic(monkeypatch): a = MemoryStorage() b = MemoryStorage() status = {} a.set_meta("foo", "bar") metasync(a, b, status, keys=["foo"]) assert a.get_meta("foo") == b.get_meta("foo") == "bar" a.set_meta("foo", "baz") metasync(a, b, status, keys=["foo"]) assert a.get_meta("foo") == b.get_meta("foo") == "baz" monkeypatch.setattr(a, "set_meta", blow_up) monkeypatch.setattr(b, "set_meta", blow_up) metasync(a, b, status, keys=["foo"]) assert a.get_meta("foo") == b.get_meta("foo") == "baz" monkeypatch.undo() monkeypatch.undo() b.set_meta("foo", None) metasync(a, b, status, keys=["foo"]) assert not a.get_meta("foo") and not b.get_meta("foo") @pytest.fixture def conflict_state(request): a = MemoryStorage() b = MemoryStorage() status = {} a.set_meta("foo", "bar") b.set_meta("foo", "baz") def cleanup(): 
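        # A genuine conflict must leave both sides and the (still empty)
        # status untouched, no matter how the test resolved.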
assert a.get_meta("foo") == "bar" assert b.get_meta("foo") == "baz" assert not status request.addfinalizer(cleanup) return a, b, status def test_conflict(conflict_state): a, b, status = conflict_state with pytest.raises(MetaSyncConflict): metasync(a, b, status, keys=["foo"]) def test_invalid_conflict_resolution(conflict_state): a, b, status = conflict_state with pytest.raises(UserError) as excinfo: metasync(a, b, status, keys=["foo"], conflict_resolution="foo") assert "Invalid conflict resolution setting" in str(excinfo.value) def test_warning_on_custom_conflict_commands(conflict_state, monkeypatch): a, b, status = conflict_state warnings = [] monkeypatch.setattr(logger, "warning", warnings.append) with pytest.raises(MetaSyncConflict): metasync(a, b, status, keys=["foo"], conflict_resolution=lambda *a, **kw: None) assert warnings == ["Custom commands don't work on metasync."] def test_conflict_same_content(): a = MemoryStorage() b = MemoryStorage() status = {} a.set_meta("foo", "bar") b.set_meta("foo", "bar") metasync(a, b, status, keys=["foo"]) assert a.get_meta("foo") == b.get_meta("foo") == status["foo"] == "bar" @pytest.mark.parametrize("wins", "ab") def test_conflict_x_wins(wins): a = MemoryStorage() b = MemoryStorage() status = {} a.set_meta("foo", "bar") b.set_meta("foo", "baz") metasync( a, b, status, keys=["foo"], conflict_resolution="a wins" if wins == "a" else "b wins", ) assert ( a.get_meta("foo") == b.get_meta("foo") == status["foo"] == ("bar" if wins == "a" else "baz") ) keys = st.text(min_size=1).filter(lambda x: x.strip() == x) values = st.text().filter(lambda x: normalize_meta_value(x) == x) metadata = st.dictionaries(keys, values) @given( a=metadata, b=metadata, status=metadata, keys=st.sets(keys), conflict_resolution=st.just("a wins") | st.just("b wins"), ) @example( a={"0": "0"}, b={}, status={"0": "0"}, keys={"0"}, conflict_resolution="a wins" ) @example( a={"0": "0"}, b={"0": "1"}, status={"0": "0"}, keys={"0"}, conflict_resolution="a wins", ) def test_fuzzing(a, b, status, keys, conflict_resolution): def _get_storage(m, instance_name): s = MemoryStorage(instance_name=instance_name) s.metadata = m return s a = _get_storage(a, "A") b = _get_storage(b, "B") winning_storage = a if conflict_resolution == "a wins" else b expected_values = { key: winning_storage.get_meta(key) for key in keys if key not in status } metasync(a, b, status, keys=keys, conflict_resolution=conflict_resolution) for key in keys: s = status.get(key, "") assert a.get_meta(key) == b.get_meta(key) == s if expected_values.get(key, "") and s: assert s == expected_values[key] vdirsyncer-0.18.0/tests/unit/test_repair.py000066400000000000000000000044131406140636100210070ustar00rootroot00000000000000import pytest from hypothesis import given from hypothesis import HealthCheck from hypothesis import settings from tests import uid_strategy from vdirsyncer.repair import IrreparableItem from vdirsyncer.repair import repair_item from vdirsyncer.repair import repair_storage from vdirsyncer.storage.memory import MemoryStorage from vdirsyncer.utils import href_safe from vdirsyncer.vobject import Item @given(uid=uid_strategy) # Using the random module for UIDs: @settings(suppress_health_check=HealthCheck.all()) def test_repair_uids(uid): s = MemoryStorage() s.items = { "one": ("asdf", Item(f"BEGIN:VCARD\nFN:Hans\nUID:{uid}\nEND:VCARD")), "two": ("asdf", Item(f"BEGIN:VCARD\nFN:Peppi\nUID:{uid}\nEND:VCARD")), } uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()] assert uid1 == uid2 repair_storage(s, 
repair_unsafe_uid=False) uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()] assert uid1 != uid2 @given(uid=uid_strategy.filter(lambda x: not href_safe(x))) # Using the random module for UIDs: @settings(suppress_health_check=HealthCheck.all()) def test_repair_unsafe_uids(uid): s = MemoryStorage() item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD") href, etag = s.upload(item) assert s.get(href)[0].uid == uid assert not href_safe(uid) repair_storage(s, repair_unsafe_uid=True) new_href = list(s.list())[0][0] assert href_safe(new_href) newuid = s.get(new_href)[0].uid assert href_safe(newuid) @pytest.mark.parametrize( "uid,href", [("b@dh0mbr3", "perfectly-fine"), ("perfectly-fine", "b@dh0mbr3")] ) def test_repair_unsafe_href(uid, href): item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD") new_item = repair_item(href, item, set(), True) assert new_item.raw != item.raw assert new_item.uid != item.uid assert href_safe(new_item.uid) def test_repair_do_nothing(): item = Item("BEGIN:VCARD\nUID:justfine\nEND:VCARD") assert repair_item("fine", item, set(), True) is item assert repair_item("@@@@/fine", item, set(), True) is item @pytest.mark.parametrize( "raw", ["AYYY", "", "@@@@", "BEGIN:VCARD", "BEGIN:FOO\nEND:FOO"] ) def test_repair_irreparable(raw): with pytest.raises(IrreparableItem): repair_item("fine", Item(raw), set(), True) vdirsyncer-0.18.0/tests/unit/utils/000077500000000000000000000000001406140636100172525ustar00rootroot00000000000000vdirsyncer-0.18.0/tests/unit/utils/test_vobject.py000066400000000000000000000225411406140636100223230ustar00rootroot00000000000000from textwrap import dedent import hypothesis.strategies as st import pytest from hypothesis import assume from hypothesis import given from hypothesis.stateful import Bundle from hypothesis.stateful import rule from hypothesis.stateful import RuleBasedStateMachine import vdirsyncer.vobject as vobject from tests import BARE_EVENT_TEMPLATE from tests import EVENT_TEMPLATE from tests import EVENT_WITH_TIMEZONE_TEMPLATE from tests import normalize_item from tests import uid_strategy from tests import VCARD_TEMPLATE _simple_split = [ VCARD_TEMPLATE.format(r=123, uid=123), VCARD_TEMPLATE.format(r=345, uid=345), VCARD_TEMPLATE.format(r=678, uid=678), ] _simple_joined = "\r\n".join( ["BEGIN:VADDRESSBOOK"] + _simple_split + ["END:VADDRESSBOOK\r\n"] ) def test_split_collection_simple(benchmark): given = benchmark(lambda: list(vobject.split_collection(_simple_joined))) assert [normalize_item(item) for item in given] == [ normalize_item(item) for item in _simple_split ] assert [x.splitlines() for x in given] == [x.splitlines() for x in _simple_split] def test_split_collection_multiple_wrappers(benchmark): joined = "\r\n".join( "BEGIN:VADDRESSBOOK\r\n" + x + "\r\nEND:VADDRESSBOOK\r\n" for x in _simple_split ) given = benchmark(lambda: list(vobject.split_collection(joined))) assert [normalize_item(item) for item in given] == [ normalize_item(item) for item in _simple_split ] assert [x.splitlines() for x in given] == [x.splitlines() for x in _simple_split] def test_join_collection_simple(benchmark): given = benchmark(lambda: vobject.join_collection(_simple_split)) assert normalize_item(given) == normalize_item(_simple_joined) assert given.splitlines() == _simple_joined.splitlines() def test_join_collection_vevents(benchmark): actual = benchmark( lambda: vobject.join_collection( [ dedent( """ BEGIN:VCALENDAR VERSION:2.0 PRODID:HUEHUE BEGIN:VTIMEZONE VALUE:The Timezone END:VTIMEZONE BEGIN:VEVENT VALUE:Event {} END:VEVENT END:VCALENDAR """ 
).format(i) for i in range(3) ] ) ) expected = dedent( """ BEGIN:VCALENDAR VERSION:2.0 PRODID:HUEHUE BEGIN:VTIMEZONE VALUE:The Timezone END:VTIMEZONE BEGIN:VEVENT VALUE:Event 0 END:VEVENT BEGIN:VEVENT VALUE:Event 1 END:VEVENT BEGIN:VEVENT VALUE:Event 2 END:VEVENT END:VCALENDAR """ ).lstrip() assert actual.splitlines() == expected.splitlines() def test_split_collection_timezones(): items = [ BARE_EVENT_TEMPLATE.format(r=123, uid=123), BARE_EVENT_TEMPLATE.format(r=345, uid=345), ] timezone = ( "BEGIN:VTIMEZONE\r\n" "TZID:/mozilla.org/20070129_1/Asia/Tokyo\r\n" "X-LIC-LOCATION:Asia/Tokyo\r\n" "BEGIN:STANDARD\r\n" "TZOFFSETFROM:+0900\r\n" "TZOFFSETTO:+0900\r\n" "TZNAME:JST\r\n" "DTSTART:19700101T000000\r\n" "END:STANDARD\r\n" "END:VTIMEZONE" ) full = "\r\n".join(["BEGIN:VCALENDAR"] + items + [timezone, "END:VCALENDAR"]) given = {normalize_item(item) for item in vobject.split_collection(full)} expected = { normalize_item( "\r\n".join(("BEGIN:VCALENDAR", item, timezone, "END:VCALENDAR")) ) for item in items } assert given == expected def test_split_contacts(): bare = "\r\n".join([VCARD_TEMPLATE.format(r=x, uid=x) for x in range(4)]) with_wrapper = "BEGIN:VADDRESSBOOK\r\n" + bare + "\nEND:VADDRESSBOOK\r\n" for _ in (bare, with_wrapper): split = list(vobject.split_collection(bare)) assert len(split) == 4 assert vobject.join_collection(split).splitlines() == with_wrapper.splitlines() def test_hash_item(): a = EVENT_TEMPLATE.format(r=1, uid=1) b = "\n".join(line for line in a.splitlines() if "PRODID" not in line) assert vobject.hash_item(a) == vobject.hash_item(b) def test_multiline_uid(benchmark): a = "BEGIN:FOO\r\n" "UID:123456789abcd\r\n" " efgh\r\n" "END:FOO\r\n" assert benchmark(lambda: vobject.Item(a).uid) == "123456789abcdefgh" complex_uid_item = dedent( """ BEGIN:VCALENDAR BEGIN:VTIMEZONE TZID:Europe/Rome X-LIC-LOCATION:Europe/Rome BEGIN:DAYLIGHT TZOFFSETFROM:+0100 TZOFFSETTO:+0200 TZNAME:CEST DTSTART:19700329T020000 RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3 END:DAYLIGHT BEGIN:STANDARD TZOFFSETFROM:+0200 TZOFFSETTO:+0100 TZNAME:CET DTSTART:19701025T030000 RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 END:STANDARD END:VTIMEZONE BEGIN:VEVENT DTSTART:20140124T133000Z DTEND:20140124T143000Z DTSTAMP:20140612T090652Z UID:040000008200E00074C5B7101A82E0080000000050AAABEEF50DCF 001000000062548482FA830A46B9EA62114AC9F0EF CREATED:20140110T102231Z DESCRIPTION:Test. 
LAST-MODIFIED:20140123T095221Z LOCATION:25.12.01.51 SEQUENCE:0 STATUS:CONFIRMED SUMMARY:Präsentation TRANSP:OPAQUE END:VEVENT END:VCALENDAR """ ).strip() def test_multiline_uid_complex(benchmark): assert benchmark(lambda: vobject.Item(complex_uid_item).uid) == ( "040000008200E00074C5B7101A82E008000000005" "0AAABEEF50DCF001000000062548482FA830A46B9" "EA62114AC9F0EF" ) def test_replace_multiline_uid(benchmark): def inner(): return vobject.Item(complex_uid_item).with_uid("a").uid assert benchmark(inner) == "a" @pytest.mark.parametrize( "template", [EVENT_TEMPLATE, EVENT_WITH_TIMEZONE_TEMPLATE, VCARD_TEMPLATE] ) @given(uid=st.one_of(st.none(), uid_strategy)) def test_replace_uid(template, uid): item = vobject.Item(template.format(r=123, uid=123)).with_uid(uid) assert item.uid == uid if uid: assert item.raw.count(f"\nUID:{uid}") == 1 else: assert "\nUID:" not in item.raw def test_broken_item(): with pytest.raises(ValueError) as excinfo: vobject._Component.parse("END:FOO") assert "Parsing error at line 1" in str(excinfo.value) item = vobject.Item("END:FOO") assert item.parsed is None def test_multiple_items(): with pytest.raises(ValueError) as excinfo: vobject._Component.parse( [ "BEGIN:FOO", "END:FOO", "BEGIN:FOO", "END:FOO", ] ) assert "Found 2 components, expected one" in str(excinfo.value) c1, c2 = vobject._Component.parse( [ "BEGIN:FOO", "END:FOO", "BEGIN:FOO", "END:FOO", ], multiple=True, ) assert c1.name == c2.name == "FOO" def test_input_types(): lines = ["BEGIN:FOO", "FOO:BAR", "END:FOO"] for x in (lines, "\r\n".join(lines), "\r\n".join(lines).encode("ascii")): c = vobject._Component.parse(x) assert c.name == "FOO" assert c.props == ["FOO:BAR"] assert not c.subcomponents value_strategy = st.text( st.characters( blacklist_categories=("Zs", "Zl", "Zp", "Cc", "Cs"), blacklist_characters=":=" ), min_size=1, ).filter(lambda x: x.strip() == x) class VobjectMachine(RuleBasedStateMachine): Unparsed = Bundle("unparsed") Parsed = Bundle("parsed") @rule(target=Unparsed, joined=st.booleans(), encoded=st.booleans()) def get_unparsed_lines(self, joined, encoded): rv = ["BEGIN:FOO", "FOO:YES", "END:FOO"] if joined: rv = "\r\n".join(rv) if encoded: rv = rv.encode("utf-8") elif encoded: assume(False) return rv @rule(unparsed=Unparsed, target=Parsed) def parse(self, unparsed): return vobject._Component.parse(unparsed) @rule(parsed=Parsed, target=Unparsed) def serialize(self, parsed): return list(parsed.dump_lines()) @rule(c=Parsed, key=uid_strategy, value=uid_strategy) def add_prop(self, c, key, value): c[key] = value assert c[key] == value assert key in c assert c.get(key) == value dump = "\r\n".join(c.dump_lines()) assert key in dump and value in dump @rule( c=Parsed, key=uid_strategy, value=uid_strategy, params=st.lists(st.tuples(value_strategy, value_strategy)), ) def add_prop_raw(self, c, key, value, params): params_str = ",".join(k + "=" + v for k, v in params) c.props.insert(0, f"{key};{params_str}:{value}") assert c[key] == value assert key in c assert c.get(key) == value @rule(c=Parsed, sub_c=Parsed) def add_component(self, c, sub_c): assume(sub_c is not c and sub_c not in c) c.subcomponents.append(sub_c) assert "\r\n".join(sub_c.dump_lines()) in "\r\n".join(c.dump_lines()) @rule(c=Parsed) def sanity_check(self, c): c1 = vobject._Component.parse(c.dump_lines()) assert c1 == c TestVobjectMachine = VobjectMachine.TestCase def test_component_contains(): item = vobject._Component.parse(["BEGIN:FOO", "FOO:YES", "END:FOO"]) assert "FOO" in item assert "BAZ" not in item with pytest.raises(ValueError): 
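        # Membership tests are only defined for property names (strings).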
42 in item # noqa: B015 vdirsyncer-0.18.0/vdirsyncer/000077500000000000000000000000001406140636100161615ustar00rootroot00000000000000vdirsyncer-0.18.0/vdirsyncer/__init__.py000066400000000000000000000013411406140636100202710ustar00rootroot00000000000000""" Vdirsyncer synchronizes calendars and contacts. """ PROJECT_HOME = "https://github.com/pimutils/vdirsyncer" BUGTRACKER_HOME = PROJECT_HOME + "/issues" DOCS_HOME = "https://vdirsyncer.pimutils.org/en/stable" try: from .version import version as __version__ # noqa except ImportError: # pragma: no cover raise ImportError( "Failed to find (autogenerated) version.py. " "This might be because you are installing from GitHub's tarballs, " "use the PyPI ones." ) def _check_python_version(): # pragma: no cover import sys if sys.version_info < (3, 7, 0): print("vdirsyncer requires at least Python 3.7.") sys.exit(1) _check_python_version() del _check_python_version vdirsyncer-0.18.0/vdirsyncer/__main__.py000066400000000000000000000001111406140636100202440ustar00rootroot00000000000000if __name__ == "__main__": from vdirsyncer.cli import app app() vdirsyncer-0.18.0/vdirsyncer/cli/000077500000000000000000000000001406140636100167305ustar00rootroot00000000000000vdirsyncer-0.18.0/vdirsyncer/cli/__init__.py000066400000000000000000000161501406140636100210440ustar00rootroot00000000000000import functools import logging import sys import click import click_log from .. import __version__ from .. import BUGTRACKER_HOME cli_logger = logging.getLogger(__name__) click_log.basic_config("vdirsyncer") class AppContext: def __init__(self): self.config = None self.fetched_params = {} self.logger = None pass_context = click.make_pass_decorator(AppContext, ensure=True) def catch_errors(f): @functools.wraps(f) def inner(*a, **kw): try: f(*a, **kw) except BaseException: from .utils import handle_cli_error handle_cli_error() sys.exit(1) return inner @click.group() @click_log.simple_verbosity_option("vdirsyncer") @click.version_option(version=__version__) @click.option("--config", "-c", metavar="FILE", help="Config file to use.") @pass_context @catch_errors def app(ctx, config): """ Synchronize calendars and contacts """ if sys.platform == "win32": cli_logger.warning( "Vdirsyncer currently does not support Windows. " "You will likely encounter bugs. " "See {}/535 for more information.".format(BUGTRACKER_HOME) ) if not ctx.config: from .config import load_config ctx.config = load_config(config) main = app def max_workers_callback(ctx, param, value): if value == 0 and logging.getLogger("vdirsyncer").level == logging.DEBUG: value = 1 cli_logger.debug(f"Using {value} maximal workers.") return value def max_workers_option(default=0): help = "Use at most this many connections. " if default == 0: help += ( 'The default is 0, which means "as many as necessary". ' "With -vdebug enabled, the default is 1." ) else: help += f"The default is {default}." return click.option( "--max-workers", default=default, type=click.IntRange(min=0, max=None), callback=max_workers_callback, help=help, ) def collections_arg_callback(ctx, param, value): """ Expand the various CLI shortforms ("pair, pair/collection") to an iterable of (pair, collections). """ # XXX: Ugly! pass_context should work everywhere. 
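    # Illustrative expansion (argument values hypothetical): the CLI
    # arguments ("bob", "frank/work") become
    #     {"bob": set(), "frank": {"work"}}.items()
    # where an empty collection set means "all collections of that pair".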
config = ctx.find_object(AppContext).config rv = {} for pair_and_collection in value or config.pairs: pair, collection = pair_and_collection, None if "/" in pair: pair, collection = pair.split("/") collections = rv.setdefault(pair, set()) if collection: collections.add(collection) return rv.items() collections_arg = click.argument( "collections", nargs=-1, callback=collections_arg_callback ) @app.command() @collections_arg @click.option( "--force-delete/--no-force-delete", help=( "Do/Don't abort synchronization when all items are about " "to be deleted from both sides." ), ) @max_workers_option() @pass_context @catch_errors def sync(ctx, collections, force_delete, max_workers): """ Synchronize the given collections or pairs. If no arguments are given, all will be synchronized. This command will not synchronize metadata, use `vdirsyncer metasync` for that. \b \b\bExamples: # Sync everything configured vdirsyncer sync \b # Sync the pairs "bob" and "frank" vdirsyncer sync bob frank \b # Sync only "first_collection" from the pair "bob" vdirsyncer sync bob/first_collection """ from .tasks import prepare_pair, sync_collection from .utils import WorkerQueue wq = WorkerQueue(max_workers) with wq.join(): for pair_name, collections in collections: wq.put( functools.partial( prepare_pair, pair_name=pair_name, collections=collections, config=ctx.config, force_delete=force_delete, callback=sync_collection, ) ) wq.spawn_worker() @app.command() @collections_arg @max_workers_option() @pass_context @catch_errors def metasync(ctx, collections, max_workers): """ Synchronize metadata of the given collections or pairs. See the `sync` command for usage. """ from .tasks import prepare_pair, metasync_collection from .utils import WorkerQueue wq = WorkerQueue(max_workers) with wq.join(): for pair_name, collections in collections: wq.put( functools.partial( prepare_pair, pair_name=pair_name, collections=collections, config=ctx.config, callback=metasync_collection, ) ) wq.spawn_worker() @app.command() @click.argument("pairs", nargs=-1) @click.option( "--list/--no-list", default=True, help=( "Whether to list all collections from both sides during discovery, " "for debugging. This is slow and may crash for broken servers." ), ) @max_workers_option(default=1) @pass_context @catch_errors def discover(ctx, pairs, max_workers, list): """ Refresh collection cache for the given pairs. """ from .tasks import discover_collections from .utils import WorkerQueue config = ctx.config wq = WorkerQueue(max_workers) with wq.join(): for pair_name in pairs or config.pairs: pair = config.get_pair(pair_name) wq.put( functools.partial( discover_collections, status_path=config.general["status_path"], pair=pair, from_cache=False, list_collections=list, ) ) wq.spawn_worker() @app.command() @click.argument("collection") @click.option( "--repair-unsafe-uid/--no-repair-unsafe-uid", default=False, help=( "Some characters in item UIDs and URLs may cause problems " "with buggy software. Adding this option will reassign " "new UIDs to those items. This is disabled by default, " "which is equivalent to `--no-repair-unsafe-uid`." ), ) @pass_context @catch_errors def repair(ctx, collection, repair_unsafe_uid): """ Repair a given collection. Runs a few checks on the collection and applies some fixes to individual items that may improve general stability, also with other CalDAV/CardDAV clients. In particular, if you encounter URL-encoding-related issues with other clients, this command with --repair-unsafe-uid might help. 
    \b
    \b\bExamples:

    # Repair the `foo` collection of the `calendars_local` storage
    vdirsyncer repair calendars_local/foo
    """
    from .tasks import repair_collection

    cli_logger.warning("This operation will take a very long time.")
    cli_logger.warning(
        "It's recommended to make a backup and "
        "turn off other clients' synchronization features."
    )
    click.confirm("Do you want to continue?", abort=True)
    repair_collection(ctx.config, collection, repair_unsafe_uid=repair_unsafe_uid)
vdirsyncer-0.18.0/vdirsyncer/cli/config.py000066400000000000000000000265521406140636100205570ustar00rootroot00000000000000import json
import os
import string
from configparser import RawConfigParser
from itertools import chain

from click_threading import get_ui_worker

from .. import exceptions
from .. import PROJECT_HOME
from ..utils import cached_property
from ..utils import expand_path
from .fetchparams import expand_fetch_params
from .utils import storage_class_from_config

GENERAL_ALL = frozenset(["status_path"])
GENERAL_REQUIRED = frozenset(["status_path"])
SECTION_NAME_CHARS = frozenset(chain(string.ascii_letters, string.digits, "_"))


def validate_section_name(name, section_type):
    invalid = set(name) - SECTION_NAME_CHARS
    if invalid:
        chars_display = "".join(sorted(SECTION_NAME_CHARS))
        raise exceptions.UserError(
            'The {}-section "{}" contains invalid characters. Only '
            "the following characters are allowed for storage and "
            "pair names:\n{}".format(section_type, name, chars_display)
        )


def _validate_general_section(general_config):
    invalid = set(general_config) - GENERAL_ALL
    missing = GENERAL_REQUIRED - set(general_config)

    problems = []
    if invalid:
        problems.append(
            "general section doesn't take the parameters: {}".format(", ".join(invalid))
        )
    if missing:
        problems.append(
            "general section is missing the parameters: {}".format(", ".join(missing))
        )
    if problems:
        raise exceptions.UserError(
            "Invalid general section. 
Copy the example " "config from the repository and edit it: {}".format(PROJECT_HOME), problems=problems, ) def _validate_collections_param(collections): if collections is None: return if not isinstance(collections, list): raise ValueError("`collections` parameter must be a list or `null`.") collection_names = set() for i, collection in enumerate(collections): try: if isinstance(collection, (str, bytes)): collection_name = collection elif isinstance(collection, list): e = ValueError( "Expected list of format " '["config_name", "storage_a_name", "storage_b_name"]' ) if len(collection) != 3: raise e if not isinstance(collection[0], (str, bytes)): raise e for x in collection[1:]: if x is not None and not isinstance(x, (str, bytes)): raise e collection_name = collection[0] else: raise ValueError("Expected string or list of three strings.") if collection_name in collection_names: raise ValueError("Duplicate value.") collection_names.add(collection_name) except ValueError as e: raise ValueError( "`collections` parameter, position {i}: {e}".format(i=i, e=str(e)) ) class _ConfigReader: def __init__(self, f): self._file = f self._parser = c = RawConfigParser() c.read_file(f) self._seen_names = set() self._general = {} self._pairs = {} self._storages = {} def _parse_section(self, section_type, name, options): validate_section_name(name, section_type) if name in self._seen_names: raise ValueError(f'Name "{name}" already used.') self._seen_names.add(name) if section_type == "general": if self._general: raise ValueError("More than one general section.") self._general = options elif section_type == "storage": self._storages[name] = options elif section_type == "pair": self._pairs[name] = options else: raise ValueError("Unknown section type.") def parse(self): for section in self._parser.sections(): if " " in section: section_type, name = section.split(" ", 1) else: section_type = name = section try: self._parse_section( section_type, name, dict(_parse_options(self._parser.items(section), section=section)), ) except ValueError as e: raise exceptions.UserError('Section "{}": {}'.format(section, str(e))) _validate_general_section(self._general) if getattr(self._file, "name", None): self._general["status_path"] = os.path.join( os.path.dirname(self._file.name), expand_path(self._general["status_path"]), ) return self._general, self._pairs, self._storages def _parse_options(items, section=None): for key, value in items: try: yield key, json.loads(value) except ValueError as e: raise ValueError('Section "{}", option "{}": {}'.format(section, key, e)) class Config: def __init__(self, general, pairs, storages): self.general = general self.storages = storages for name, options in storages.items(): options["instance_name"] = name self.pairs = {} for name, options in pairs.items(): try: self.pairs[name] = PairConfig(self, name, options) except ValueError as e: raise exceptions.UserError(f"Pair {name}: {e}") @classmethod def from_fileobject(cls, f): reader = _ConfigReader(f) return cls(*reader.parse()) @classmethod def from_filename_or_environment(cls, fname=None): if fname is None: fname = os.environ.get("VDIRSYNCER_CONFIG", None) if fname is None: fname = expand_path("~/.vdirsyncer/config") if not os.path.exists(fname): xdg_config_dir = os.environ.get( "XDG_CONFIG_HOME", expand_path("~/.config/") ) fname = os.path.join(xdg_config_dir, "vdirsyncer/config") try: with open(fname) as f: return cls.from_fileobject(f) except Exception as e: raise exceptions.UserError( "Error during reading config {}: {}".format(fname, e) 
) def get_storage_args(self, storage_name): try: args = self.storages[storage_name] except KeyError: raise exceptions.UserError( "Storage {!r} not found. " "These are the configured storages: {}".format( storage_name, list(self.storages) ) ) else: return expand_fetch_params(args) def get_pair(self, pair_name): try: return self.pairs[pair_name] except KeyError as e: raise exceptions.PairNotFound(e, pair_name=pair_name) class PairConfig: def __init__(self, full_config, name, options): self._config = full_config self.name = name self.name_a = options.pop("a") self.name_b = options.pop("b") self._partial_sync = options.pop("partial_sync", None) self.metadata = options.pop("metadata", None) or () self.conflict_resolution = self._process_conflict_resolution_param( options.pop("conflict_resolution", None) ) try: self.collections = options.pop("collections") except KeyError: raise ValueError( "collections parameter missing.\n\n" "As of 0.9.0 this parameter has no default anymore. " "Set `collections = null` explicitly in your pair config." ) else: _validate_collections_param(self.collections) if options: raise ValueError("Unknown options: {}".format(", ".join(options))) def _process_conflict_resolution_param(self, conflict_resolution): if conflict_resolution in (None, "a wins", "b wins"): return conflict_resolution elif ( isinstance(conflict_resolution, list) and len(conflict_resolution) > 1 and conflict_resolution[0] == "command" ): def resolve(a, b): a_name = self.config_a["instance_name"] b_name = self.config_b["instance_name"] command = conflict_resolution[1:] def inner(): return _resolve_conflict_via_command(a, b, command, a_name, b_name) ui_worker = get_ui_worker() return ui_worker.put(inner) return resolve else: raise ValueError("Invalid value for `conflict_resolution`.") # The following parameters are lazily evaluated because evaluating # self.config_a would expand all `x.fetch` parameters. This is costly and # unnecessary if the pair is not actually synced. @cached_property def config_a(self): return self._config.get_storage_args(self.name_a) @cached_property def config_b(self): return self._config.get_storage_args(self.name_b) @cached_property def partial_sync(self): partial_sync = self._partial_sync # We need to use UserError here because ValueError is not # caught at the time this is expanded. if partial_sync is not None: cls_a, _ = storage_class_from_config(self.config_a) cls_b, _ = storage_class_from_config(self.config_b) if ( not cls_a.read_only and not self.config_a.get("read_only", False) and not cls_b.read_only and not self.config_b.get("read_only", False) ): raise exceptions.UserError( "`partial_sync` is only effective if one storage is " "read-only. Use `read_only = true` in exactly one storage " "section." ) if partial_sync is None: partial_sync = "revert" if partial_sync not in ("ignore", "revert", "error"): raise exceptions.UserError("Invalid value for `partial_sync`.") return partial_sync class CollectionConfig: def __init__(self, pair, name, config_a, config_b): self.pair = pair self._config = pair._config self.name = name self.config_a = config_a self.config_b = config_b #: Public API. Khal's config wizard depends on this function. 
load_config = Config.from_filename_or_environment def _resolve_conflict_via_command(a, b, command, a_name, b_name, _check_call=None): import tempfile import shutil if _check_call is None: from subprocess import check_call as _check_call from ..vobject import Item dir = tempfile.mkdtemp(prefix="vdirsyncer-conflict.") try: a_tmp = os.path.join(dir, a_name) b_tmp = os.path.join(dir, b_name) with open(a_tmp, "w") as f: f.write(a.raw) with open(b_tmp, "w") as f: f.write(b.raw) command[0] = expand_path(command[0]) _check_call(command + [a_tmp, b_tmp]) with open(a_tmp) as f: new_a = f.read() with open(b_tmp) as f: new_b = f.read() if new_a != new_b: raise exceptions.UserError("The two files are not completely " "equal.") return Item(new_a) finally: shutil.rmtree(dir) vdirsyncer-0.18.0/vdirsyncer/cli/discover.py000066400000000000000000000166151406140636100211310ustar00rootroot00000000000000import hashlib import json import logging import sys from .. import exceptions from ..utils import cached_property from .utils import handle_collection_not_found from .utils import handle_storage_init_error from .utils import load_status from .utils import save_status from .utils import storage_class_from_config from .utils import storage_instance_from_config # Increase whenever upgrade potentially breaks discovery cache and collections # should be re-discovered DISCOVERY_CACHE_VERSION = 1 logger = logging.getLogger(__name__) def _get_collections_cache_key(pair): m = hashlib.sha256() j = json.dumps( [ DISCOVERY_CACHE_VERSION, pair.collections, pair.config_a, pair.config_b, ], sort_keys=True, ) m.update(j.encode("utf-8")) return m.hexdigest() def collections_for_pair(status_path, pair, from_cache=True, list_collections=False): """Determine all configured collections for a given pair. Takes care of shortcut expansion and result caching. :param status_path: The path to the status directory. :param from_cache: Whether to load from cache (aborting on cache miss) or discover and save to cache. :returns: iterable of (collection, (a_args, b_args)) """ cache_key = _get_collections_cache_key(pair) if from_cache: rv = load_status(status_path, pair.name, data_type="collections") if rv and rv.get("cache_key", None) == cache_key: return list( _expand_collections_cache( rv["collections"], pair.config_a, pair.config_b ) ) elif rv: raise exceptions.UserError( "Detected change in config file, " "please run `vdirsyncer discover {}`.".format(pair.name) ) else: raise exceptions.UserError( "Please run `vdirsyncer discover {}` " " before synchronization.".format(pair.name) ) logger.info("Discovering collections for pair {}".format(pair.name)) a_discovered = _DiscoverResult(pair.config_a) b_discovered = _DiscoverResult(pair.config_b) if list_collections: _print_collections(pair.config_a["instance_name"], a_discovered.get_self) _print_collections(pair.config_b["instance_name"], b_discovered.get_self) # We have to use a list here because the special None/null value would get # mangled to string (because JSON objects always have string keys). 
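    # Shape sketch (values hypothetical): the final result is a list like
    #     [("work", (a_args, b_args)), (None, (a_args, b_args))]
    # where a_args/b_args are complete storage configs that include the
    # "collection" key.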
rv = list( expand_collections( shortcuts=pair.collections, config_a=pair.config_a, config_b=pair.config_b, get_a_discovered=a_discovered.get_self, get_b_discovered=b_discovered.get_self, _handle_collection_not_found=handle_collection_not_found, ) ) _sanity_check_collections(rv) save_status( status_path, pair.name, data_type="collections", data={ "collections": list( _compress_collections_cache(rv, pair.config_a, pair.config_b) ), "cache_key": cache_key, }, ) return rv def _sanity_check_collections(collections): for _, (a_args, b_args) in collections: storage_instance_from_config(a_args) storage_instance_from_config(b_args) def _compress_collections_cache(collections, config_a, config_b): def deduplicate(x, y): rv = {} for key, value in x.items(): if key not in y or y[key] != value: rv[key] = value return rv for name, (a, b) in collections: yield name, (deduplicate(a, config_a), deduplicate(b, config_b)) def _expand_collections_cache(collections, config_a, config_b): for name, (a_delta, b_delta) in collections: a = dict(config_a) a.update(a_delta) b = dict(config_b) b.update(b_delta) yield name, (a, b) class _DiscoverResult: def __init__(self, config): self._cls, _ = storage_class_from_config(config) self._config = config def get_self(self): return self._discovered @cached_property def _discovered(self): try: discovered = list(self._cls.discover(**self._config)) except NotImplementedError: return {} except Exception: return handle_storage_init_error(self._cls, self._config) else: storage_type = self._config["type"] rv = {} for args in discovered: args["type"] = storage_type rv[args["collection"]] = args return rv def expand_collections( shortcuts, config_a, config_b, get_a_discovered, get_b_discovered, _handle_collection_not_found, ): handled_collections = set() if shortcuts is None: shortcuts = [None] for shortcut in shortcuts: if shortcut == "from a": collections = get_a_discovered() elif shortcut == "from b": collections = get_b_discovered() else: collections = [shortcut] for collection in collections: if isinstance(collection, list): collection, collection_a, collection_b = collection else: collection_a = collection_b = collection if collection in handled_collections: continue handled_collections.add(collection) a_args = _collection_from_discovered( get_a_discovered, collection_a, config_a, _handle_collection_not_found ) b_args = _collection_from_discovered( get_b_discovered, collection_b, config_b, _handle_collection_not_found ) yield collection, (a_args, b_args) def _collection_from_discovered( get_discovered, collection, config, _handle_collection_not_found ): if collection is None: args = dict(config) args["collection"] = None return args try: return get_discovered()[collection] except KeyError: return _handle_collection_not_found(config, collection) def _print_collections(instance_name, get_discovered): try: discovered = get_discovered() except exceptions.UserError: raise except Exception: # Unless discovery failed due to a user-inflicted error (instanceof # UserError), we don't even know if the storage supports discovery # properly. So we can't abort. 
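        # Instead, log the full traceback at debug level and carry on;
        # listing collections here is purely informational.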
import traceback logger.debug("".join(traceback.format_tb(sys.exc_info()[2]))) logger.warning( "Failed to discover collections for {}, use `-vdebug` " "to see the full traceback.".format(instance_name) ) return logger.info(f"{instance_name}:") for args in discovered.values(): collection = args["collection"] if collection is None: continue args["instance_name"] = instance_name try: storage = storage_instance_from_config(args, create=False) displayname = storage.get_meta("displayname") except Exception: displayname = "" logger.info( " - {}{}".format( json.dumps(collection), f' ("{displayname}")' if displayname and displayname != collection else "", ) ) vdirsyncer-0.18.0/vdirsyncer/cli/fetchparams.py000066400000000000000000000047231406140636100216050ustar00rootroot00000000000000import logging import click from . import AppContext from .. import exceptions from ..utils import expand_path from ..utils import synchronized SUFFIX = ".fetch" logger = logging.getLogger(__name__) def expand_fetch_params(config): config = dict(config) for key in list(config): if not key.endswith(SUFFIX): continue newkey = key[: -len(SUFFIX)] if newkey in config: raise ValueError(f"Can't set {key} and {newkey}.") config[newkey] = _fetch_value(config[key], key) del config[key] return config @synchronized() def _fetch_value(opts, key): if not isinstance(opts, list): raise ValueError( "Invalid value for {}: Expected a list, found {!r}.".format(key, opts) ) if not opts: raise ValueError("Expected list of length > 0.") try: ctx = click.get_current_context().find_object(AppContext) if ctx is None: raise RuntimeError() password_cache = ctx.fetched_params except RuntimeError: password_cache = {} cache_key = tuple(opts) if cache_key in password_cache: rv = password_cache[cache_key] logger.debug(f"Found cached value for {opts!r}.") if isinstance(rv, BaseException): raise rv return rv strategy = opts[0] try: strategy_fn = STRATEGIES[strategy] except KeyError: raise exceptions.UserError(f"Unknown strategy: {strategy}") logger.debug("Fetching value for {} with {} strategy.".format(key, strategy)) try: rv = strategy_fn(*opts[1:]) except (click.Abort, KeyboardInterrupt) as e: password_cache[cache_key] = e raise else: if not rv: raise exceptions.UserError( "Empty value for {}, this most likely " "indicates an error.".format(key) ) password_cache[cache_key] = rv return rv def _strategy_command(*command): import subprocess command = (expand_path(command[0]),) + command[1:] try: stdout = subprocess.check_output(command, universal_newlines=True) return stdout.strip("\n") except OSError as e: raise exceptions.UserError( "Failed to execute command: {}\n{}".format(" ".join(command), str(e)) ) def _strategy_prompt(text): return click.prompt(text, hide_input=True) STRATEGIES = { "command": _strategy_command, "prompt": _strategy_prompt, } vdirsyncer-0.18.0/vdirsyncer/cli/tasks.py000066400000000000000000000116441406140636100204350ustar00rootroot00000000000000import functools import json from .. import exceptions from .. 
import sync from .config import CollectionConfig from .discover import collections_for_pair from .discover import storage_class_from_config from .discover import storage_instance_from_config from .utils import cli_logger from .utils import get_status_name from .utils import handle_cli_error from .utils import JobFailed from .utils import load_status from .utils import manage_sync_status from .utils import save_status def prepare_pair(wq, pair_name, collections, config, callback, **kwargs): pair = config.get_pair(pair_name) all_collections = dict( collections_for_pair(status_path=config.general["status_path"], pair=pair) ) # spawn one worker less because we can reuse the current one new_workers = -1 for collection_name in collections or all_collections: try: config_a, config_b = all_collections[collection_name] except KeyError: raise exceptions.UserError( "Pair {}: Collection {} not found. These are the " "configured collections:\n{}".format( pair_name, json.dumps(collection_name), list(all_collections) ) ) new_workers += 1 collection = CollectionConfig(pair, collection_name, config_a, config_b) wq.put( functools.partial( callback, collection=collection, general=config.general, **kwargs ) ) for _ in range(new_workers): wq.spawn_worker() def sync_collection(wq, collection, general, force_delete): pair = collection.pair status_name = get_status_name(pair.name, collection.name) try: cli_logger.info(f"Syncing {status_name}") a = storage_instance_from_config(collection.config_a) b = storage_instance_from_config(collection.config_b) sync_failed = False def error_callback(e): nonlocal sync_failed sync_failed = True handle_cli_error(status_name, e) with manage_sync_status( general["status_path"], pair.name, collection.name ) as status: sync.sync( a, b, status, conflict_resolution=pair.conflict_resolution, force_delete=force_delete, error_callback=error_callback, partial_sync=pair.partial_sync, ) if sync_failed: raise JobFailed() except JobFailed: raise except BaseException: handle_cli_error(status_name) raise JobFailed() def discover_collections(wq, pair, **kwargs): rv = collections_for_pair(pair=pair, **kwargs) collections = list(c for c, (a, b) in rv) if collections == [None]: collections = None cli_logger.info( "Saved for {}: collections = {}".format(pair.name, json.dumps(collections)) ) def repair_collection(config, collection, repair_unsafe_uid): from ..repair import repair_storage storage_name, collection = collection, None if "/" in storage_name: storage_name, collection = storage_name.split("/") config = config.get_storage_args(storage_name) storage_type = config["type"] if collection is not None: cli_logger.info("Discovering collections (skipping cache).") cls, config = storage_class_from_config(config) for config in cls.discover(**config): if config["collection"] == collection: break else: raise exceptions.UserError( "Couldn't find collection {} for storage {}.".format( collection, storage_name ) ) config["type"] = storage_type storage = storage_instance_from_config(config) cli_logger.info(f"Repairing {storage_name}/{collection}") cli_logger.warning("Make sure no other program is talking to the server.") repair_storage(storage, repair_unsafe_uid=repair_unsafe_uid) def metasync_collection(wq, collection, general): from ..metasync import metasync pair = collection.pair status_name = get_status_name(pair.name, collection.name) try: cli_logger.info(f"Metasyncing {status_name}") status = ( load_status( general["status_path"], pair.name, collection.name, data_type="metadata" ) or {} ) a = 
storage_instance_from_config(collection.config_a) b = storage_instance_from_config(collection.config_b) metasync( a, b, status, conflict_resolution=pair.conflict_resolution, keys=pair.metadata, ) except BaseException: handle_cli_error(status_name) raise JobFailed() save_status( general["status_path"], pair.name, collection.name, data_type="metadata", data=status, ) vdirsyncer-0.18.0/vdirsyncer/cli/utils.py000066400000000000000000000325641406140636100204540ustar00rootroot00000000000000import contextlib import errno import importlib import itertools import json import os import queue import sys import click import click_threading from atomicwrites import atomic_write from . import cli_logger from .. import BUGTRACKER_HOME from .. import DOCS_HOME from .. import exceptions from ..sync.exceptions import IdentConflict from ..sync.exceptions import PartialSync from ..sync.exceptions import StorageEmpty from ..sync.exceptions import SyncConflict from ..sync.status import SqliteStatus from ..utils import expand_path from ..utils import get_storage_init_args STATUS_PERMISSIONS = 0o600 STATUS_DIR_PERMISSIONS = 0o700 class _StorageIndex: def __init__(self): self._storages = dict( caldav="vdirsyncer.storage.dav.CalDAVStorage", carddav="vdirsyncer.storage.dav.CardDAVStorage", filesystem="vdirsyncer.storage.filesystem.FilesystemStorage", http="vdirsyncer.storage.http.HttpStorage", singlefile="vdirsyncer.storage.singlefile.SingleFileStorage", google_calendar="vdirsyncer.storage.google.GoogleCalendarStorage", google_contacts="vdirsyncer.storage.google.GoogleContactsStorage", etesync_calendars="vdirsyncer.storage.etesync.EtesyncCalendars", etesync_contacts="vdirsyncer.storage.etesync.EtesyncContacts", ) def __getitem__(self, name): item = self._storages[name] if not isinstance(item, str): return item modname, clsname = item.rsplit(".", 1) mod = importlib.import_module(modname) self._storages[name] = rv = getattr(mod, clsname) assert rv.storage_name == name return rv storage_names = _StorageIndex() del _StorageIndex class JobFailed(RuntimeError): pass def handle_cli_error(status_name=None, e=None): """ Print a useful error message for the current exception. This is supposed to catch all exceptions, and should never raise any exceptions itself. """ try: if e is not None: raise e else: raise except exceptions.UserError as e: cli_logger.critical(e) except StorageEmpty as e: cli_logger.error( '{status_name}: Storage "{name}" was completely emptied. If you ' "want to delete ALL entries on BOTH sides, then use " "`vdirsyncer sync --force-delete {status_name}`. " "Otherwise delete the files for {status_name} in your status " "directory.".format( name=e.empty_storage.instance_name, status_name=status_name ) ) except PartialSync as e: cli_logger.error( "{status_name}: Attempted change on {storage}, which is read-only" ". Set `partial_sync` in your pair section to `ignore` to ignore " "those changes, or `revert` to revert them on the other side.".format( status_name=status_name, storage=e.storage ) ) except SyncConflict as e: cli_logger.error( "{status_name}: One item changed on both sides. 
Resolve this " "conflict manually, or by setting the `conflict_resolution` " "parameter in your config file.\n" "See also {docs}/config.html#pair-section\n" "Item ID: {e.ident}\n" "Item href on side A: {e.href_a}\n" "Item href on side B: {e.href_b}\n".format( status_name=status_name, e=e, docs=DOCS_HOME ) ) except IdentConflict as e: cli_logger.error( '{status_name}: Storage "{storage.instance_name}" contains ' "multiple items with the same UID or even content. Vdirsyncer " "will now abort the synchronization of this collection, because " "the fix for this is not clear; It could be the result of a badly " "behaving server. You can try running:\n\n" " vdirsyncer repair {storage.instance_name}\n\n" "But make sure to have a backup of your data in some form. The " "offending hrefs are:\n\n{href_list}\n".format( status_name=status_name, storage=e.storage, href_list="\n".join(map(repr, e.hrefs)), ) ) except (click.Abort, KeyboardInterrupt, JobFailed): pass except exceptions.PairNotFound as e: cli_logger.error( "Pair {pair_name} does not exist. Please check your " "configuration file and make sure you've typed the pair name " "correctly".format(pair_name=e.pair_name) ) except exceptions.InvalidResponse as e: cli_logger.error( "The server returned something vdirsyncer doesn't understand. " "Error message: {!r}\n" "While this is most likely a serverside problem, the vdirsyncer " "devs are generally interested in such bugs. Please report it in " "the issue tracker at {}".format(e, BUGTRACKER_HOME) ) except exceptions.CollectionRequired: cli_logger.error( "One or more storages don't support `collections = null`. " 'You probably want to set `collections = ["from a", "from b"]`.' ) except Exception as e: tb = sys.exc_info()[2] import traceback tb = traceback.format_tb(tb) if status_name: msg = f"Unknown error occurred for {status_name}" else: msg = "Unknown error occurred" msg += f": {e}\nUse `-vdebug` to see the full traceback." cli_logger.error(msg) cli_logger.debug("".join(tb)) def get_status_name(pair, collection): if collection is None: return pair return pair + "/" + collection def get_status_path(base_path, pair, collection=None, data_type=None): assert data_type is not None status_name = get_status_name(pair, collection) path = expand_path(os.path.join(base_path, status_name)) if os.path.isfile(path) and data_type == "items": new_path = path + ".items" # XXX: Legacy migration cli_logger.warning( "Migrating statuses: Renaming {} to {}".format(path, new_path) ) os.rename(path, new_path) path += "." 
+ data_type return path def load_status(base_path, pair, collection=None, data_type=None): path = get_status_path(base_path, pair, collection, data_type) if not os.path.exists(path): return None assert_permissions(path, STATUS_PERMISSIONS) with open(path) as f: try: return dict(json.load(f)) except ValueError: pass return {} def prepare_status_path(path): dirname = os.path.dirname(path) try: os.makedirs(dirname, STATUS_DIR_PERMISSIONS) except OSError as e: if e.errno != errno.EEXIST: raise @contextlib.contextmanager def manage_sync_status(base_path, pair_name, collection_name): path = get_status_path(base_path, pair_name, collection_name, "items") status = None legacy_status = None try: # XXX: Legacy migration with open(path, "rb") as f: if f.read(1) == b"{": f.seek(0) legacy_status = dict(json.load(f)) except (OSError, ValueError): pass if legacy_status is not None: cli_logger.warning("Migrating legacy status to sqlite") os.remove(path) status = SqliteStatus(path) status.load_legacy_status(legacy_status) else: prepare_status_path(path) status = SqliteStatus(path) yield status def save_status(base_path, pair, collection=None, data_type=None, data=None): assert data_type is not None assert data is not None status_name = get_status_name(pair, collection) path = expand_path(os.path.join(base_path, status_name)) + "." + data_type prepare_status_path(path) with atomic_write(path, mode="w", overwrite=True) as f: json.dump(data, f) os.chmod(path, STATUS_PERMISSIONS) def storage_class_from_config(config): config = dict(config) storage_name = config.pop("type") try: cls = storage_names[storage_name] except KeyError: raise exceptions.UserError(f"Unknown storage type: {storage_name}") return cls, config def storage_instance_from_config(config, create=True): """ :param config: A configuration dictionary to pass as kwargs to the class corresponding to config['type'] """ cls, new_config = storage_class_from_config(config) try: return cls(**new_config) except exceptions.CollectionNotFound as e: if create: config = handle_collection_not_found( config, config.get("collection", None), e=str(e) ) return storage_instance_from_config(config, create=False) else: raise except Exception: return handle_storage_init_error(cls, new_config) def handle_storage_init_error(cls, config): e = sys.exc_info()[1] if not isinstance(e, TypeError) or "__init__" not in repr(e): raise all, required = get_storage_init_args(cls) given = set(config) missing = required - given invalid = given - all problems = [] if missing: problems.append( "{} storage requires the parameters: {}".format( cls.storage_name, ", ".join(missing) ) ) if invalid: problems.append( "{} storage doesn't take the parameters: {}".format( cls.storage_name, ", ".join(invalid) ) ) if not problems: raise e raise exceptions.UserError( "Failed to initialize {}".format(config["instance_name"]), problems=problems ) class WorkerQueue: """ A simple worker-queue setup. Note that workers quit if queue is empty. That means you have to first put things into the queue before spawning the worker! """ def __init__(self, max_workers): self._queue = queue.Queue() self._workers = [] self._max_workers = max_workers self._shutdown_handlers = [] # According to http://stackoverflow.com/a/27062830, those are # threadsafe compared to increasing a simple integer variable. 
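        # Usage sketch: each next(self.num_done_tasks) thread-safely
        # increments the counter and returns its previous value, so the
        # final next() in join() reports how many tasks finished before it.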
self.num_done_tasks = itertools.count() self.num_failed_tasks = itertools.count() def shutdown(self): while self._shutdown_handlers: try: self._shutdown_handlers.pop()() except Exception: pass def _worker(self): while True: try: func = self._queue.get(False) except queue.Empty: break try: func(wq=self) except Exception: handle_cli_error() next(self.num_failed_tasks) finally: self._queue.task_done() next(self.num_done_tasks) if not self._queue.unfinished_tasks: self.shutdown() def spawn_worker(self): if self._max_workers and len(self._workers) >= self._max_workers: return t = click_threading.Thread(target=self._worker) t.start() self._workers.append(t) @contextlib.contextmanager def join(self): assert self._workers or not self._queue.unfinished_tasks ui_worker = click_threading.UiWorker() self._shutdown_handlers.append(ui_worker.shutdown) _echo = click.echo with ui_worker.patch_click(): yield if not self._workers: # Ugly hack, needed because ui_worker is not running. click.echo = _echo cli_logger.critical("Nothing to do.") sys.exit(5) ui_worker.run() self._queue.join() for worker in self._workers: worker.join() tasks_failed = next(self.num_failed_tasks) tasks_done = next(self.num_done_tasks) if tasks_failed > 0: cli_logger.error( "{} out of {} tasks failed.".format(tasks_failed, tasks_done) ) sys.exit(1) def put(self, f): return self._queue.put(f) def assert_permissions(path, wanted): permissions = os.stat(path).st_mode & 0o777 if permissions > wanted: cli_logger.warning( "Correcting permissions of {} from {:o} to {:o}".format( path, permissions, wanted ) ) os.chmod(path, wanted) def handle_collection_not_found(config, collection, e=None): storage_name = config.get("instance_name", None) cli_logger.warning( "{}No collection {} found for storage {}.".format( f"{e}\n" if e else "", json.dumps(collection), storage_name ) ) if click.confirm("Should vdirsyncer attempt to create it?"): storage_type = config["type"] cls, config = storage_class_from_config(config) config["collection"] = collection try: args = cls.create_collection(**config) args["type"] = storage_type return args except NotImplementedError as e: cli_logger.error(e) raise exceptions.UserError( 'Unable to find or create collection "{collection}" for ' 'storage "{storage}". Please create the collection ' "yourself.".format(collection=collection, storage=storage_name) ) vdirsyncer-0.18.0/vdirsyncer/exceptions.py000066400000000000000000000034311406140636100207150ustar00rootroot00000000000000""" Contains exception classes used by vdirsyncer. Not all exceptions are here, only the most commonly used ones. """ class Error(Exception): """Baseclass for all errors.""" def __init__(self, *args, **kwargs): for key, value in kwargs.items(): if getattr(self, key, object()) is not None: # pragma: no cover raise TypeError(f"Invalid argument: {key}") setattr(self, key, value) super().__init__(*args) class UserError(Error, ValueError): """Wrapper exception to be used to signify the traceback should not be shown to the user.""" problems = None def __str__(self): msg = Error.__str__(self) for problem in self.problems or (): msg += f"\n - {problem}" return msg class CollectionNotFound(Error): """Collection not found""" class PairNotFound(Error): """Pair not found""" pair_name = None class PreconditionFailed(Error): """ - The item doesn't exist although it should - The item exists although it shouldn't - The etags don't match. Due to CalDAV we can't actually say which error it is. This error may indicate race conditions. 
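
    For example, uploading an item whose href already exists on the server
    is typically reported as the AlreadyExistingError subclass below.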
""" class NotFoundError(PreconditionFailed): """Item not found""" class AlreadyExistingError(PreconditionFailed): """Item already exists.""" existing_href = None class WrongEtagError(PreconditionFailed): """Wrong etag""" class ReadOnlyError(Error): """Storage is read-only.""" class InvalidResponse(Error, ValueError): """The backend returned an invalid result.""" class UnsupportedMetadataError(Error, NotImplementedError): """The storage doesn't support this type of metadata.""" class CollectionRequired(Error): """`collection = null` is not allowed.""" vdirsyncer-0.18.0/vdirsyncer/http.py000066400000000000000000000137551406140636100175250ustar00rootroot00000000000000import logging import requests from . import __version__ from . import DOCS_HOME from . import exceptions from .utils import expand_path logger = logging.getLogger(__name__) USERAGENT = f"vdirsyncer/{__version__}" def _detect_faulty_requests(): # pragma: no cover text = ( "Error during import: {e}\n\n" "If you have installed vdirsyncer from a distro package, please file " "a bug against that package, not vdirsyncer.\n\n" "Consult {d}/problems.html#requests-related-importerrors" "-based-distributions on how to work around this." ) try: from requests_toolbelt.auth.guess import GuessAuth # noqa except ImportError as e: import sys print(text.format(e=str(e), d=DOCS_HOME), file=sys.stderr) sys.exit(1) _detect_faulty_requests() del _detect_faulty_requests def prepare_auth(auth, username, password): if username and password: if auth == "basic" or auth is None: return (username, password) elif auth == "digest": from requests.auth import HTTPDigestAuth return HTTPDigestAuth(username, password) elif auth == "guess": try: from requests_toolbelt.auth.guess import GuessAuth except ImportError: raise exceptions.UserError( "Your version of requests_toolbelt is too " "old for `guess` authentication. At least " "version 0.4.0 is required." ) else: return GuessAuth(username, password) else: raise exceptions.UserError("Unknown authentication method: {}".format(auth)) elif auth: raise exceptions.UserError( "You need to specify username and password " "for {} authentication.".format(auth) ) else: return None def prepare_verify(verify, verify_fingerprint): if isinstance(verify, (str, bytes)): verify = expand_path(verify) elif not isinstance(verify, bool): raise exceptions.UserError( "Invalid value for verify ({}), " "must be a path to a PEM-file or boolean.".format(verify) ) if verify_fingerprint is not None: if not isinstance(verify_fingerprint, (bytes, str)): raise exceptions.UserError( "Invalid value for verify_fingerprint " "({}), must be a string or null.".format(verify_fingerprint) ) elif not verify: raise exceptions.UserError( "Disabling all SSL validation is forbidden. Consider setting " "verify_fingerprint if you have a broken or self-signed cert." 
) return { "verify": verify, "verify_fingerprint": verify_fingerprint, } def prepare_client_cert(cert): if isinstance(cert, (str, bytes)): cert = expand_path(cert) elif isinstance(cert, list): cert = tuple(map(prepare_client_cert, cert)) return cert def _install_fingerprint_adapter(session, fingerprint): prefix = "https://" try: from requests_toolbelt.adapters.fingerprint import FingerprintAdapter except ImportError: raise RuntimeError( "`verify_fingerprint` can only be used with " "requests-toolbelt versions >= 0.4.0" ) if not isinstance(session.adapters[prefix], FingerprintAdapter): fingerprint_adapter = FingerprintAdapter(fingerprint) session.mount(prefix, fingerprint_adapter) def request( method, url, session=None, latin1_fallback=True, verify_fingerprint=None, **kwargs ): """ Wrapper method for requests, to ease logging and mocking. Parameters should be the same as for ``requests.request``, except: :param session: A requests session object to use. :param verify_fingerprint: Optional. SHA1 or MD5 fingerprint of the expected server certificate. :param latin1_fallback: RFC-2616 specifies the default Content-Type of text/* to be latin1, which is not always correct, but exactly what requests is doing. Setting this parameter to False will use charset autodetection (usually ending up with utf8) instead of plainly falling back to this silly default. See https://github.com/kennethreitz/requests/issues/2042 """ if session is None: session = requests.Session() if verify_fingerprint is not None: _install_fingerprint_adapter(session, verify_fingerprint) session.hooks = {"response": _fix_redirects} func = session.request logger.debug("=" * 20) logger.debug(f"{method} {url}") logger.debug(kwargs.get("headers", {})) logger.debug(kwargs.get("data", None)) logger.debug("Sending request...") assert isinstance(kwargs.get("data", b""), bytes) r = func(method, url, **kwargs) # See https://github.com/kennethreitz/requests/issues/2042 content_type = r.headers.get("Content-Type", "") if ( not latin1_fallback and "charset" not in content_type and content_type.startswith("text/") ): logger.debug("Removing latin1 fallback") r.encoding = None logger.debug(r.status_code) logger.debug(r.headers) logger.debug(r.content) if r.status_code == 412: raise exceptions.PreconditionFailed(r.reason) if r.status_code in (404, 410): raise exceptions.NotFoundError(r.reason) r.raise_for_status() return r def _fix_redirects(r, *args, **kwargs): """ Requests discards of the body content when it is following a redirect that is not a 307 or 308. We never want that to happen. See: https://github.com/kennethreitz/requests/issues/3915 https://github.com/pimutils/vdirsyncer/pull/585 https://github.com/pimutils/vdirsyncer/issues/586 FIXME: This solution isn't very nice. A new hook in requests would be better. """ if r.is_redirect: logger.debug("Rewriting status code from %s to 307", r.status_code) r.status_code = 307 vdirsyncer-0.18.0/vdirsyncer/metasync.py000066400000000000000000000031551406140636100203620ustar00rootroot00000000000000import logging from . 
import exceptions from .storage.base import normalize_meta_value logger = logging.getLogger(__name__) class MetaSyncError(exceptions.Error): pass class MetaSyncConflict(MetaSyncError): key = None def metasync(storage_a, storage_b, status, keys, conflict_resolution=None): def _a_to_b(): logger.info(f"Copying {key} to {storage_b}") storage_b.set_meta(key, a) status[key] = a def _b_to_a(): logger.info(f"Copying {key} to {storage_a}") storage_a.set_meta(key, b) status[key] = b def _resolve_conflict(): if a == b: status[key] = a elif conflict_resolution == "a wins": _a_to_b() elif conflict_resolution == "b wins": _b_to_a() else: if callable(conflict_resolution): logger.warning("Custom commands don't work on metasync.") elif conflict_resolution is not None: raise exceptions.UserError("Invalid conflict resolution setting.") raise MetaSyncConflict(key) for key in keys: a = storage_a.get_meta(key) b = storage_b.get_meta(key) s = normalize_meta_value(status.get(key)) logger.debug(f"Key: {key}") logger.debug(f"A: {a}") logger.debug(f"B: {b}") logger.debug(f"S: {s}") if a != s and b != s: _resolve_conflict() elif a != s and b == s: _a_to_b() elif a == s and b != s: _b_to_a() else: assert a == b for key in set(status) - set(keys): del status[key] vdirsyncer-0.18.0/vdirsyncer/repair.py000066400000000000000000000037251406140636100200240ustar00rootroot00000000000000import logging from os.path import basename from .utils import generate_href from .utils import href_safe logger = logging.getLogger(__name__) class IrreparableItem(Exception): pass def repair_storage(storage, repair_unsafe_uid): seen_uids = set() all_hrefs = list(storage.list()) for i, (href, _) in enumerate(all_hrefs): item, etag = storage.get(href) logger.info("[{}/{}] Processing {}".format(i, len(all_hrefs), href)) try: new_item = repair_item(href, item, seen_uids, repair_unsafe_uid) except IrreparableItem: logger.error( "Item {!r} is malformed beyond repair. " "The PRODID property may indicate which software " "created this item.".format(href) ) logger.error(f"Item content: {item.raw!r}") continue seen_uids.add(new_item.uid) if new_item.raw != item.raw: if new_item.uid != item.uid: storage.upload(new_item) storage.delete(href, etag) else: storage.update(href, new_item, etag) def repair_item(href, item, seen_uids, repair_unsafe_uid): if item.parsed is None: raise IrreparableItem() new_item = item if not item.uid: logger.warning("No UID, assigning random UID.") new_item = item.with_uid(generate_href()) elif item.uid in seen_uids: logger.warning("Duplicate UID, assigning random UID.") new_item = item.with_uid(generate_href()) elif not href_safe(item.uid) or not href_safe(basename(href)): if not repair_unsafe_uid: logger.warning( "UID may cause problems, add " "--repair-unsafe-uid to repair." ) else: logger.warning("UID or href is unsafe, assigning random UID.") new_item = item.with_uid(generate_href()) if not new_item.uid: raise IrreparableItem() return new_item vdirsyncer-0.18.0/vdirsyncer/storage/000077500000000000000000000000001406140636100176255ustar00rootroot00000000000000vdirsyncer-0.18.0/vdirsyncer/storage/__init__.py000066400000000000000000000004261406140636100217400ustar00rootroot00000000000000""" There are storage classes which control the access to one vdir-collection and offer basic CRUD-ish methods for modifying those collections. The exact interface is described in `vdirsyncer.storage.base`, the `Storage` class should be a superclass of all storage classes. 
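
A minimal usage sketch (the storage class and its constructor arguments
are hypothetical)::

    storage = SomeStorage(read_only=True)
    for href, etag in storage.list():
        item, etag = storage.get(href)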
""" vdirsyncer-0.18.0/vdirsyncer/storage/base.py000066400000000000000000000171631406140636100211210ustar00rootroot00000000000000import contextlib import functools from .. import exceptions from ..utils import uniq def mutating_storage_method(f): @functools.wraps(f) def inner(self, *args, **kwargs): if self.read_only: raise exceptions.ReadOnlyError("This storage is read-only.") return f(self, *args, **kwargs) return inner class StorageMeta(type): def __init__(cls, name, bases, d): for method in ("update", "upload", "delete"): setattr(cls, method, mutating_storage_method(getattr(cls, method))) return super().__init__(name, bases, d) class Storage(metaclass=StorageMeta): """Superclass of all storages, interface that all storages have to implement. Terminology: - ITEM: Instance of the Item class, represents a calendar event, task or contact. - HREF: String; Per-storage identifier of item, might be UID. The reason items aren't just referenced by their UID is because the CalDAV and CardDAV specifications make this unperformant to implement. - ETAG: String; Checksum of item, or something similar that changes when the item does. Strings can be either unicode strings or bytestrings. If bytestrings, an ASCII encoding is assumed. :param read_only: Whether the synchronization algorithm should avoid writes to this storage. Some storages accept no value other than ``True``. """ fileext = ".txt" # The string used in the config to denote the type of storage. Should be # overridden by subclasses. storage_name = None # The string used in the config to denote a particular instance. Will be # overridden during instantiation. instance_name = None # The machine-readable name of this collection. collection = None # A value of True means the storage does not support write-methods such as # upload, update and delete. A value of False means the storage does # support those methods. read_only = False # The attribute values to show in the representation of the storage. _repr_attributes = () def __init__(self, instance_name=None, read_only=None, collection=None): if read_only is None: read_only = self.read_only if self.read_only and not read_only: raise exceptions.UserError("This storage can only be read-only.") self.read_only = bool(read_only) if collection and instance_name: instance_name = f"{instance_name}/{collection}" self.instance_name = instance_name self.collection = collection @classmethod def discover(cls, **kwargs): """Discover collections given a basepath or -URL to many collections. :param **kwargs: Keyword arguments to additionally pass to the storage instances returned. You shouldn't pass `collection` here, otherwise TypeError will be raised. :returns: iterable of ``storage_args``. ``storage_args`` is a dictionary of ``**kwargs`` to pass to this class to obtain a storage instance pointing to this collection. It also must contain a ``"collection"`` key. That key's value is used to match two collections together for synchronization. IOW it is a machine-readable identifier for the collection, usually obtained from the last segment of a URL or filesystem path. """ raise NotImplementedError() @classmethod def create_collection(cls, collection, **kwargs): """ Create the specified collection and return the new arguments. ``collection=None`` means the arguments are already pointing to a possible collection location. The returned args should contain the collection name, for UI purposes. 
""" raise NotImplementedError() def __repr__(self): try: if self.instance_name: return str(self.instance_name) except ValueError: pass return "<{}(**{})>".format( self.__class__.__name__, {x: getattr(self, x) for x in self._repr_attributes}, ) def list(self): """ :returns: list of (href, etag) """ raise NotImplementedError() def get(self, href): """Fetch a single item. :param href: href to fetch :returns: (item, etag) :raises: :exc:`vdirsyncer.exceptions.PreconditionFailed` if item can't be found. """ raise NotImplementedError() def get_multi(self, hrefs): """Fetch multiple items. Duplicate hrefs must be ignored. Functionally similar to :py:meth:`get`, but might bring performance benefits on some storages when used cleverly. :param hrefs: list of hrefs to fetch :raises: :exc:`vdirsyncer.exceptions.PreconditionFailed` if one of the items couldn't be found. :returns: iterable of (href, item, etag) """ for href in uniq(hrefs): item, etag = self.get(href) yield href, item, etag def has(self, href): """Check if an item exists by its href. :returns: True or False """ try: self.get(href) except exceptions.PreconditionFailed: return False else: return True def upload(self, item): """Upload a new item. In cases where the new etag cannot be atomically determined (i.e. in the same "transaction" as the upload itself), this method may return `None` as etag. This special case only exists because of DAV. Avoid this situation whenever possible. :raises: :exc:`vdirsyncer.exceptions.PreconditionFailed` if there is already an item with that href. :returns: (href, etag) """ raise NotImplementedError() def update(self, href, item, etag): """Update an item. The etag may be none in some cases, see `upload`. :raises: :exc:`vdirsyncer.exceptions.PreconditionFailed` if the etag on the server doesn't match the given etag or if the item doesn't exist. :returns: etag """ raise NotImplementedError() def delete(self, href, etag): """Delete an item by href. :raises: :exc:`vdirsyncer.exceptions.PreconditionFailed` when item has a different etag or doesn't exist. """ raise NotImplementedError() @contextlib.contextmanager def at_once(self): """A contextmanager that buffers all writes. Essentially, this:: s.upload(...) s.update(...) becomes this:: with s.at_once(): s.upload(...) s.update(...) Note that this removes guarantees about which exceptions are returned when. """ yield def get_meta(self, key): """Get metadata value for collection/storage. See the vdir specification for the keys that *have* to be accepted. :param key: The metadata key. :type key: unicode """ raise NotImplementedError("This storage does not support metadata.") def set_meta(self, key, value): """Get metadata value for collection/storage. :param key: The metadata key. :type key: unicode :param value: The value. :type value: unicode """ raise NotImplementedError("This storage does not support metadata.") def normalize_meta_value(value): # `None` is returned by iCloud for empty properties. if not value or value == "None": value = "" return value.strip() vdirsyncer-0.18.0/vdirsyncer/storage/dav.py000066400000000000000000000654751406140636100207720ustar00rootroot00000000000000import datetime import logging import urllib.parse as urlparse import xml.etree.ElementTree as etree from inspect import getfullargspec from inspect import signature import requests from requests.exceptions import HTTPError from .. import exceptions from .. import http from .. 

vdirsyncer-0.18.0/vdirsyncer/storage/dav.py

import datetime
import logging
import urllib.parse as urlparse
import xml.etree.ElementTree as etree
from inspect import getfullargspec
from inspect import signature

import requests
from requests.exceptions import HTTPError

from .. import exceptions
from .. import http
from .. import utils
from ..http import prepare_auth
from ..http import prepare_client_cert
from ..http import prepare_verify
from ..http import USERAGENT
from ..vobject import Item
from .base import normalize_meta_value
from .base import Storage

dav_logger = logging.getLogger(__name__)

CALDAV_DT_FORMAT = "%Y%m%dT%H%M%SZ"


def _generate_path_reserved_chars():
    for x in "/?#[]!$&'()*+,;":
        x = urlparse.quote(x, "")
        yield x.upper()
        yield x.lower()


_path_reserved_chars = frozenset(_generate_path_reserved_chars())
del _generate_path_reserved_chars


def _contains_quoted_reserved_chars(x):
    for y in _path_reserved_chars:
        if y in x:
            dav_logger.debug(f"Unsafe character: {y!r}")
            return True
    return False


def _assert_multistatus_success(r):
    # Xandikos returns a multistatus on PUT.
    try:
        root = _parse_xml(r.content)
    except InvalidXMLResponse:
        return
    for status in root.findall(".//{DAV:}status"):
        parts = status.text.strip().split()
        try:
            st = int(parts[1])
        except (ValueError, IndexError):
            continue
        if st < 200 or st >= 400:
            raise HTTPError(f"Server error: {st}")


def _normalize_href(base, href):
    """Normalize the href to be a path only relative to hostname and
    schema."""
    orig_href = href
    if not href:
        raise ValueError(href)

    x = urlparse.urljoin(base, href)
    x = urlparse.urlsplit(x).path

    # Encoding issues:
    # - https://github.com/owncloud/contacts/issues/581
    # - https://github.com/Kozea/Radicale/issues/298
    old_x = None
    while old_x is None or x != old_x:
        if _contains_quoted_reserved_chars(x):
            break
        old_x = x
        x = urlparse.unquote(x)

    x = urlparse.quote(x, "/@%:")

    if orig_href == x:
        dav_logger.debug(f"Already normalized: {x!r}")
    else:
        dav_logger.debug("Normalized URL from {!r} to {!r}".format(orig_href, x))

    return x


class InvalidXMLResponse(exceptions.InvalidResponse):
    pass


_BAD_XML_CHARS = (
    b"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x0b\x0c\x0e\x0f"
    b"\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
)


def _clean_body(content, bad_chars=_BAD_XML_CHARS):
    new_content = content.translate(None, bad_chars)
    if new_content != content:
        dav_logger.warning(
            "Your server incorrectly returned ASCII control characters in its "
            "XML. Vdirsyncer ignores those, but this is a bug in your server."
        )
    return new_content


def _parse_xml(content):
    try:
        return etree.XML(_clean_body(content))
    except etree.ParseError as e:
        raise InvalidXMLResponse(
            "Invalid XML encountered: {}\n"
            "Double-check the URLs in your config.".format(e)
        )


def _merge_xml(items):
    if not items:
        return None
    rv = items[0]
    for item in items[1:]:
        rv.extend(item.iter())
    return rv


def _fuzzy_matches_mimetype(strict, weak):
    # different servers give different getcontenttypes:
    # "text/vcard", "text/x-vcard", "text/x-vcard; charset=utf-8",
    # "text/directory;profile=vCard", "text/directory",
    # "text/vcard; charset=utf-8"
    if strict is None or weak is None:
        return True

    mediatype, subtype = strict.split("/")
    if subtype in weak:
        return True
    return False
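
# Editor's illustrative sketch, not part of the original source: what the
# href normalization above does with a percent-encoded `@` in an absolute
# URL. The host name is a placeholder.
def _example_normalize_href():
    base = "https://dav.example.com/calendars/"
    # `%40` is not a reserved character, so it is unquoted back to `@`.
    assert (
        _normalize_href(base, "https://dav.example.com/calendars/x%40y.ics")
        == "/calendars/x@y.ics"
    )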

class Discover:
    _namespace = None
    _resourcetype = None
    _homeset_xml = None
    _homeset_tag = None
    _well_known_uri = None
    # NOTE: The XML bodies in this file were stripped by the extraction that
    # produced this dump; they are reconstructed here from the surrounding
    # code (tag names, namespaces) and may differ cosmetically from upstream.
    _collection_xml = b"""
    <propfind xmlns="DAV:">
        <prop>
            <resourcetype />
        </prop>
    </propfind>
    """

    def __init__(self, session, kwargs):
        if kwargs.pop("collection", None) is not None:
            raise TypeError("collection argument must not be given.")
        self.session = session
        self.kwargs = kwargs

    @staticmethod
    def _get_collection_from_url(url):
        _, collection = url.rstrip("/").rsplit("/", 1)
        return urlparse.unquote(collection)

    def find_principal(self):
        try:
            return self._find_principal_impl("")
        except (HTTPError, exceptions.Error):
            dav_logger.debug("Trying out well-known URI")
            return self._find_principal_impl(self._well_known_uri)

    def _find_principal_impl(self, url):
        headers = self.session.get_default_headers()
        headers["Depth"] = "0"
        body = b"""
        <propfind xmlns="DAV:">
            <prop>
                <current-user-principal />
            </prop>
        </propfind>
        """

        response = self.session.request("PROPFIND", url, headers=headers, data=body)

        root = _parse_xml(response.content)
        rv = root.find(".//{DAV:}current-user-principal/{DAV:}href")
        if rv is None:
            # This is for servers that don't support current-user-principal
            # E.g. Synology NAS
            # See https://github.com/pimutils/vdirsyncer/issues/498
            dav_logger.debug(
                "No current-user-principal returned, re-using URL {}".format(
                    response.url
                )
            )
            return response.url
        return urlparse.urljoin(response.url, rv.text).rstrip("/") + "/"

    def find_home(self):
        url = self.find_principal()
        headers = self.session.get_default_headers()
        headers["Depth"] = "0"
        response = self.session.request(
            "PROPFIND", url, headers=headers, data=self._homeset_xml
        )

        root = etree.fromstring(response.content)
        # Better don't do string formatting here, because of XML namespaces
        rv = root.find(".//" + self._homeset_tag + "/{DAV:}href")
        if rv is None:
            raise InvalidXMLResponse("Couldn't find home-set.")
        return urlparse.urljoin(response.url, rv.text).rstrip("/") + "/"

    def find_collections(self):
        rv = None
        try:
            rv = list(self._find_collections_impl(""))
        except (HTTPError, exceptions.Error):
            pass

        if rv:
            return rv
        dav_logger.debug("Given URL is not a homeset URL")
        return self._find_collections_impl(self.find_home())

    def _check_collection_resource_type(self, response):
        if self._resourcetype is None:
            return True

        props = _merge_xml(response.findall("{DAV:}propstat/{DAV:}prop"))
        if props is None or not len(props):
            dav_logger.debug("Skipping, missing <prop>: %s", response)
            return False
        if props.find("{DAV:}resourcetype/" + self._resourcetype) is None:
            dav_logger.debug(
                "Skipping, not of resource type %s: %s", self._resourcetype, response
            )
            return False
        return True

    def _find_collections_impl(self, url):
        headers = self.session.get_default_headers()
        headers["Depth"] = "1"
        r = self.session.request(
            "PROPFIND", url, headers=headers, data=self._collection_xml
        )
        root = _parse_xml(r.content)
        done = set()
        for response in root.findall("{DAV:}response"):
            if not self._check_collection_resource_type(response):
                continue

            href = response.find("{DAV:}href")
            if href is None:
                raise InvalidXMLResponse("Missing href tag for collection props.")
            href = urlparse.urljoin(r.url, href.text)
            if href not in done:
                done.add(href)
                yield {"href": href}

    def discover(self):
        for c in self.find_collections():
            url = c["href"]
            collection = self._get_collection_from_url(url)
            storage_args = dict(self.kwargs)
            storage_args.update({"url": url, "collection": collection})
            yield storage_args

    def create(self, collection):
        if collection is None:
            collection = self._get_collection_from_url(self.kwargs["url"])

        for c in self.discover():
            if c["collection"] == collection:
                return c

        home = self.find_home()
        url = urlparse.urljoin(home, urlparse.quote(collection, "/@"))

        try:
            url = self._create_collection_impl(url)
        except HTTPError as e:
            raise NotImplementedError(e)
        else:
            rv = dict(self.kwargs)
            rv["collection"] = collection
            rv["url"] = url
            return rv

    def _create_collection_impl(self, url):
        # Reconstructed MKCOL body (the original XML was stripped in this
        # dump); the `{}` placeholder for the resourcetype element survived.
        data = """<?xml version="1.0" encoding="utf-8" ?>
            <mkcol xmlns="DAV:">
                <set>
                    <prop>
                        <resourcetype>
                            <collection/>
                            {}
                        </resourcetype>
                    </prop>
                </set>
            </mkcol>
        """.format(
            etree.tostring(etree.Element(self._resourcetype), encoding="unicode")
        ).encode(
            "utf-8"
        )

        response = self.session.request(
            "MKCOL",
            url,
            data=data,
            headers=self.session.get_default_headers(),
        )

        return response.url


class CalDiscover(Discover):
    _namespace = "urn:ietf:params:xml:ns:caldav"
    _resourcetype = "{%s}calendar" % _namespace
    _homeset_xml = b"""
    <propfind xmlns="DAV:" xmlns:c="urn:ietf:params:xml:ns:caldav">
        <prop>
            <c:calendar-home-set />
        </prop>
    </propfind>
    """
    _homeset_tag = "{%s}calendar-home-set" % _namespace
    _well_known_uri = "/.well-known/caldav"


class CardDiscover(Discover):
    _namespace = "urn:ietf:params:xml:ns:carddav"
    _resourcetype = "{%s}addressbook" % _namespace
    _homeset_xml = b"""
    <propfind xmlns="DAV:" xmlns:c="urn:ietf:params:xml:ns:carddav">
        <prop>
            <c:addressbook-home-set />
        </prop>
    </propfind>
    """
    _homeset_tag = "{%s}addressbook-home-set" % _namespace
    _well_known_uri = "/.well-known/carddav"


class DAVSession:
    """
    A helper class to connect to DAV servers.
    """

    @classmethod
    def init_and_remaining_args(cls, **kwargs):
        argspec = getfullargspec(cls.__init__)
        self_args, remainder = utils.split_dict(kwargs, argspec.args.__contains__)
        return cls(**self_args), remainder

    def __init__(
        self,
        url,
        username="",
        password="",
        verify=True,
        auth=None,
        useragent=USERAGENT,
        verify_fingerprint=None,
        auth_cert=None,
    ):
        self._settings = {
            "cert": prepare_client_cert(auth_cert),
            "auth": prepare_auth(auth, username, password),
        }
        self._settings.update(prepare_verify(verify, verify_fingerprint))

        self.useragent = useragent
        self.url = url.rstrip("/") + "/"

        self._session = requests.session()

    @utils.cached_property
    def parsed_url(self):
        return urlparse.urlparse(self.url)

    def request(self, method, path, **kwargs):
        url = self.url
        if path:
            url = urlparse.urljoin(self.url, path)

        more = dict(self._settings)
        more.update(kwargs)
        return http.request(method, url, session=self._session, **more)

    def get_default_headers(self):
        return {
            "User-Agent": self.useragent,
            "Content-Type": "application/xml; charset=UTF-8",
        }
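
# Editor's illustrative sketch, not part of the original source: driving a
# bare DAVSession by hand. The URL is a placeholder, and a reachable server
# is required for the request to succeed.
def _example_davsession_depth0_propfind():
    session = DAVSession(url="https://dav.example.com/")
    headers = session.get_default_headers()
    headers["Depth"] = "0"
    # Reuses the resourcetype PROPFIND body defined on Discover above.
    return session.request(
        "PROPFIND", "", headers=headers, data=Discover._collection_xml
    )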

class DAVStorage(Storage):
    # the file extension of items. Useful for testing against radicale.
    fileext = None
    # mimetype of items
    item_mimetype = None
    # XML to use when fetching multiple hrefs.
    get_multi_template = None
    # The LXML query for extracting results in get_multi
    get_multi_data_query = None
    # The Discover subclass to use
    discovery_class = None
    # The DAVSession class to use
    session_class = DAVSession

    _repr_attributes = ("username", "url")

    _property_table = {
        "displayname": ("displayname", "DAV:"),
    }

    def __init__(self, **kwargs):
        # defined for _repr_attributes
        self.username = kwargs.get("username")
        self.url = kwargs.get("url")

        self.session, kwargs = self.session_class.init_and_remaining_args(**kwargs)
        super().__init__(**kwargs)

    __init__.__signature__ = signature(session_class.__init__)

    @classmethod
    def discover(cls, **kwargs):
        session, _ = cls.session_class.init_and_remaining_args(**kwargs)
        d = cls.discovery_class(session, kwargs)
        return d.discover()

    @classmethod
    def create_collection(cls, collection, **kwargs):
        session, _ = cls.session_class.init_and_remaining_args(**kwargs)
        d = cls.discovery_class(session, kwargs)
        return d.create(collection)

    def _normalize_href(self, *args, **kwargs):
        return _normalize_href(self.session.url, *args, **kwargs)

    def _get_href(self, item):
        href = utils.generate_href(item.ident)
        return self._normalize_href(href + self.fileext)

    def _is_item_mimetype(self, mimetype):
        return _fuzzy_matches_mimetype(self.item_mimetype, mimetype)

    def get(self, href):
        ((actual_href, item, etag),) = self.get_multi([href])
        assert href == actual_href
        return item, etag

    def get_multi(self, hrefs):
        hrefs = set(hrefs)
        href_xml = []
        for href in hrefs:
            if href != self._normalize_href(href):
                raise exceptions.NotFoundError(href)
            # Reconstructed: the <href> wrapper was stripped in this dump.
            href_xml.append(f"<href>{href}</href>")
        if not href_xml:
            return ()

        data = self.get_multi_template.format(hrefs="\n".join(href_xml)).encode(
            "utf-8"
        )
        response = self.session.request(
            "REPORT", "", data=data, headers=self.session.get_default_headers()
        )
        root = _parse_xml(response.content)  # etree only can handle bytes
        rv = []
        hrefs_left = set(hrefs)
        for href, etag, prop in self._parse_prop_responses(root):
            raw = prop.find(self.get_multi_data_query)
            if raw is None:
                dav_logger.warning(
                    "Skipping {}, the item content is missing.".format(href)
                )
                continue

            raw = raw.text or ""

            if isinstance(raw, bytes):
                raw = raw.decode(response.encoding)
            if isinstance(etag, bytes):
                etag = etag.decode(response.encoding)

            try:
                hrefs_left.remove(href)
            except KeyError:
                if href in hrefs:
                    dav_logger.warning("Server sent item twice: {}".format(href))
                else:
                    dav_logger.warning("Server sent unsolicited item: {}".format(href))
            else:
                rv.append((href, Item(raw), etag))
        for href in hrefs_left:
            raise exceptions.NotFoundError(href)
        return rv

    def _put(self, href, item, etag):
        headers = self.session.get_default_headers()
        headers["Content-Type"] = self.item_mimetype
        if etag is None:
            headers["If-None-Match"] = "*"
        else:
            headers["If-Match"] = etag

        response = self.session.request(
            "PUT", href, data=item.raw.encode("utf-8"), headers=headers
        )

        _assert_multistatus_success(response)
        # The server may not return an etag under certain conditions:
        #
        #   An origin server MUST NOT send a validator header field (Section
        #   7.2), such as an ETag or Last-Modified field, in a successful
        #   response to PUT unless the request's representation data was saved
        #   without any transformation applied to the body (i.e., the
        #   resource's new representation data is identical to the
        #   representation data received in the PUT request) and the validator
        #   field value reflects the new representation.
        #
        # -- https://tools.ietf.org/html/rfc7231#section-4.3.4
        #
        # In such cases we return a constant etag. The next synchronization
        # will then detect an etag change and will download the new item.
        etag = response.headers.get("etag", None)
        href = self._normalize_href(response.url)
        return href, etag

    def update(self, href, item, etag):
        if etag is None:
            raise ValueError("etag must be given and must not be None.")
        href, etag = self._put(self._normalize_href(href), item, etag)
        return etag

    def upload(self, item):
        href = self._get_href(item)
        return self._put(href, item, None)

    def delete(self, href, etag):
        href = self._normalize_href(href)
        headers = self.session.get_default_headers()
        headers.update({"If-Match": etag})

        self.session.request("DELETE", href, headers=headers)

    def _parse_prop_responses(self, root, handled_hrefs=None):
        if handled_hrefs is None:
            handled_hrefs = set()
        for response in root.iter("{DAV:}response"):
            href = response.find("{DAV:}href")
            if href is None:
                dav_logger.error("Skipping response, href is missing.")
                continue

            href = self._normalize_href(href.text)
            if href in handled_hrefs:
                # Servers that send duplicate hrefs:
                # - Zimbra
                #   https://github.com/pimutils/vdirsyncer/issues/88
                # - Davmail
                #   https://github.com/pimutils/vdirsyncer/issues/144
                dav_logger.warning("Skipping identical href: {!r}".format(href))
                continue

            props = response.findall("{DAV:}propstat/{DAV:}prop")
            if props is None or not len(props):
                dav_logger.debug("Skipping {!r}, properties are missing.".format(href))
                continue
            else:
                props = _merge_xml(props)

            if props.find("{DAV:}resourcetype/{DAV:}collection") is not None:
                dav_logger.debug(f"Skipping {href!r}, is collection.")
                continue

            etag = getattr(props.find("{DAV:}getetag"), "text", "")
            if not etag:
                dav_logger.debug(
                    "Skipping {!r}, etag property is missing.".format(href)
                )
                continue

            contenttype = getattr(props.find("{DAV:}getcontenttype"), "text", None)
            if not self._is_item_mimetype(contenttype):
                dav_logger.debug(
                    "Skipping {!r}, {!r} != {!r}.".format(
                        href, contenttype, self.item_mimetype
                    )
                )
                continue

            handled_hrefs.add(href)
            yield href, etag, props

    def list(self):
        headers = self.session.get_default_headers()
        headers["Depth"] = "1"
        # Reconstructed PROPFIND body (stripped in this dump).
        data = b"""<?xml version="1.0" encoding="utf-8" ?>
            <propfind xmlns="DAV:">
                <prop>
                    <resourcetype/>
                    <getcontenttype/>
                    <getetag/>
                </prop>
            </propfind>
            """

        # We use a PROPFIND request instead of addressbook-query due to issues
        # with Zimbra. See https://github.com/pimutils/vdirsyncer/issues/83
        response = self.session.request("PROPFIND", "", data=data, headers=headers)
        root = _parse_xml(response.content)

        rv = self._parse_prop_responses(root)
        for href, etag, _prop in rv:
            yield href, etag

    def get_meta(self, key):
        try:
            tagname, namespace = self._property_table[key]
        except KeyError:
            raise exceptions.UnsupportedMetadataError()

        xpath = f"{{{namespace}}}{tagname}"
        data = """<?xml version="1.0" encoding="utf-8" ?>
            <propfind xmlns="DAV:">
                <prop>
                    {}
                </prop>
            </propfind>
        """.format(
            etree.tostring(etree.Element(xpath), encoding="unicode")
        ).encode(
            "utf-8"
        )

        headers = self.session.get_default_headers()
        headers["Depth"] = "0"

        response = self.session.request("PROPFIND", "", data=data, headers=headers)

        root = _parse_xml(response.content)

        for prop in root.findall(".//" + xpath):
            text = normalize_meta_value(getattr(prop, "text", None))
            if text:
                return text
        return ""

    def set_meta(self, key, value):
        try:
            tagname, namespace = self._property_table[key]
        except KeyError:
            raise exceptions.UnsupportedMetadataError()

        lxml_selector = f"{{{namespace}}}{tagname}"
        element = etree.Element(lxml_selector)
        element.text = normalize_meta_value(value)

        data = """<?xml version="1.0" encoding="utf-8" ?>
            <propertyupdate xmlns="DAV:">
                <set>
                    <prop>
                        {}
                    </prop>
                </set>
            </propertyupdate>
        """.format(
            etree.tostring(element, encoding="unicode")
        ).encode(
            "utf-8"
        )

        self.session.request(
            "PROPPATCH", "", data=data, headers=self.session.get_default_headers()
        )

        # XXX: Response content is currently ignored. Though exceptions are
        # raised for HTTP errors, a multistatus with errorcodes inside is not
        # parsed yet. Not sure how common those are, or how they look like. It
        # might be easier (and safer in case of a stupid server) to just issue
        # a PROPFIND to see if the value got actually set.
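
# Editor's illustrative sketch, not part of the original source: running
# collection discovery for a DAVStorage subclass. URL and credentials are
# placeholders.
def _example_discover_collections(storage_cls):
    for args in storage_cls.discover(
        url="https://dav.example.com/", username="user", password="hunter2"
    ):
        # Each ``args`` dict can be passed back as ``storage_cls(**args)``.
        yield args["collection"], args["url"]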

class CalDAVStorage(DAVStorage):
    storage_name = "caldav"
    fileext = ".ics"
    item_mimetype = "text/calendar"
    discovery_class = CalDiscover

    start_date = None
    end_date = None

    # Reconstructed multiget body (stripped in this dump); the `{hrefs}`
    # placeholder survived.
    get_multi_template = """<?xml version="1.0" encoding="utf-8" ?>
        <C:calendar-multiget xmlns="DAV:"
            xmlns:C="urn:ietf:params:xml:ns:caldav">
            <prop>
                <getetag/>
                <C:calendar-data/>
            </prop>
            {hrefs}
        </C:calendar-multiget>"""

    get_multi_data_query = "{urn:ietf:params:xml:ns:caldav}calendar-data"

    _property_table = dict(DAVStorage._property_table)
    _property_table.update(
        {
            "color": ("calendar-color", "http://apple.com/ns/ical/"),
        }
    )

    def __init__(self, start_date=None, end_date=None, item_types=(), **kwargs):
        super().__init__(**kwargs)
        if not isinstance(item_types, (list, tuple)):
            raise exceptions.UserError("item_types must be a list.")

        self.item_types = tuple(item_types)
        if (start_date is None) != (end_date is None):
            raise exceptions.UserError(
                "If start_date is given, end_date has to be given too."
            )
        elif start_date is not None and end_date is not None:
            namespace = dict(datetime.__dict__)
            namespace["start_date"] = self.start_date = (
                eval(start_date, namespace)
                if isinstance(start_date, (bytes, str))
                else start_date
            )
            self.end_date = (
                eval(end_date, namespace)
                if isinstance(end_date, (bytes, str))
                else end_date
            )

    @staticmethod
    def _get_list_filters(components, start, end):
        if components:
            # Reconstructed comp-filter/time-range XML (stripped in this
            # dump); the placeholders survived.
            caldavfilter = """
                <C:comp-filter name="VCALENDAR">
                    <C:comp-filter name="{component}">
                        {timefilter}
                    </C:comp-filter>
                </C:comp-filter>
                """

            if start is not None and end is not None:
                start = start.strftime(CALDAV_DT_FORMAT)
                end = end.strftime(CALDAV_DT_FORMAT)

                timefilter = '<C:time-range start="{start}" end="{end}"/>'.format(
                    start=start, end=end
                )
            else:
                timefilter = ""

            for component in components:
                yield caldavfilter.format(component=component, timefilter=timefilter)
        else:
            if start is not None and end is not None:
                yield from CalDAVStorage._get_list_filters(
                    ("VTODO", "VEVENT"), start, end
                )

    def list(self):
        caldavfilters = list(
            self._get_list_filters(self.item_types, self.start_date, self.end_date)
        )
        if not caldavfilters:
            # If we don't have any filters (which is the default), taking the
            # risk of sending a calendar-query is not necessary. There doesn't
            # seem to be a widely-usable way to send calendar-queries with the
            # same semantics as a PROPFIND request... so why not use PROPFIND
            # instead?
            #
            # See https://github.com/dmfs/tasks/issues/118 for backstory.
            yield from DAVStorage.list(self)

        data = """<?xml version="1.0" encoding="utf-8" ?>
            <C:calendar-query xmlns="DAV:"
                xmlns:C="urn:ietf:params:xml:ns:caldav">
                <prop>
                    <getcontenttype/>
                    <getetag/>
                </prop>
                <C:filter>
                    {caldavfilter}
                </C:filter>
            </C:calendar-query>"""

        headers = self.session.get_default_headers()
        # https://github.com/pimutils/vdirsyncer/issues/166
        # The default in CalDAV's calendar-queries is 0, but the examples use
        # an explicit value of 1 for querying items. It is extremely unclear
        # in the spec which values from WebDAV are actually allowed.
headers["Depth"] = "1" handled_hrefs = set() for caldavfilter in caldavfilters: xml = data.format(caldavfilter=caldavfilter).encode("utf-8") response = self.session.request("REPORT", "", data=xml, headers=headers) root = _parse_xml(response.content) rv = self._parse_prop_responses(root, handled_hrefs) for href, etag, _prop in rv: yield href, etag class CardDAVStorage(DAVStorage): storage_name = "carddav" fileext = ".vcf" item_mimetype = "text/vcard" discovery_class = CardDiscover get_multi_template = """ {hrefs} """ get_multi_data_query = "{urn:ietf:params:xml:ns:carddav}address-data" vdirsyncer-0.18.0/vdirsyncer/storage/etesync.py000066400000000000000000000161761406140636100216640ustar00rootroot00000000000000import binascii import contextlib import functools import logging import os import atomicwrites import click try: import etesync import etesync.exceptions from etesync import AddressBook, Contact, Calendar, Event has_etesync = True except ImportError: has_etesync = False AddressBook = Contact = Calendar = Event = None from .. import exceptions from ..cli.utils import assert_permissions from ..utils import checkdir from ..vobject import Item from .base import Storage logger = logging.getLogger(__name__) def _writing_op(f): @functools.wraps(f) def inner(self, *args, **kwargs): if not self._at_once: self._sync_journal() rv = f(self, *args, **kwargs) if not self._at_once: self._sync_journal() return rv return inner class _Session: def __init__(self, email, secrets_dir, server_url=None, db_path=None): if not has_etesync: raise exceptions.UserError("Dependencies for etesync are not " "installed.") server_url = server_url or etesync.API_URL self.email = email self.secrets_dir = os.path.join(secrets_dir, email + "/") self._auth_token_path = os.path.join(self.secrets_dir, "auth_token") self._key_path = os.path.join(self.secrets_dir, "key") auth_token = self._get_auth_token() if not auth_token: password = click.prompt( "Enter service password for {}".format(self.email), hide_input=True ) auth_token = etesync.Authenticator(server_url).get_auth_token( self.email, password ) self._set_auth_token(auth_token) self._db_path = db_path or os.path.join(self.secrets_dir, "db.sqlite") self.etesync = etesync.EteSync( email, auth_token, remote=server_url, db_path=self._db_path ) key = self._get_key() if not key: password = click.prompt("Enter key password", hide_input=True) click.echo(f"Deriving key for {self.email}") self.etesync.derive_key(password) self._set_key(self.etesync.cipher_key) else: self.etesync.cipher_key = key def _get_auth_token(self): try: with open(self._auth_token_path) as f: return f.read().strip() or None except OSError: pass def _set_auth_token(self, token): checkdir(os.path.dirname(self._auth_token_path), create=True) with atomicwrites.atomic_write(self._auth_token_path) as f: f.write(token) assert_permissions(self._auth_token_path, 0o600) def _get_key(self): try: with open(self._key_path, "rb") as f: return f.read() except OSError: pass def _set_key(self, content): checkdir(os.path.dirname(self._key_path), create=True) with atomicwrites.atomic_write(self._key_path, mode="wb") as f: f.write(content) assert_permissions(self._key_path, 0o600) class EtesyncStorage(Storage): _collection_type = None _item_type = None _at_once = False def __init__(self, email, secrets_dir, server_url=None, db_path=None, **kwargs): if kwargs.get("collection", None) is None: raise ValueError("Collection argument required") self._session = _Session(email, secrets_dir, server_url, db_path) 

vdirsyncer-0.18.0/vdirsyncer/storage/etesync.py

import binascii
import contextlib
import functools
import logging
import os

import atomicwrites
import click

try:
    import etesync
    import etesync.exceptions
    from etesync import AddressBook, Contact, Calendar, Event

    has_etesync = True
except ImportError:
    has_etesync = False
    AddressBook = Contact = Calendar = Event = None

from .. import exceptions
from ..cli.utils import assert_permissions
from ..utils import checkdir
from ..vobject import Item
from .base import Storage

logger = logging.getLogger(__name__)


def _writing_op(f):
    @functools.wraps(f)
    def inner(self, *args, **kwargs):
        if not self._at_once:
            self._sync_journal()
        rv = f(self, *args, **kwargs)
        if not self._at_once:
            self._sync_journal()
        return rv

    return inner


class _Session:
    def __init__(self, email, secrets_dir, server_url=None, db_path=None):
        if not has_etesync:
            raise exceptions.UserError("Dependencies for etesync are not installed.")
        server_url = server_url or etesync.API_URL
        self.email = email
        self.secrets_dir = os.path.join(secrets_dir, email + "/")

        self._auth_token_path = os.path.join(self.secrets_dir, "auth_token")
        self._key_path = os.path.join(self.secrets_dir, "key")

        auth_token = self._get_auth_token()
        if not auth_token:
            password = click.prompt(
                "Enter service password for {}".format(self.email), hide_input=True
            )
            auth_token = etesync.Authenticator(server_url).get_auth_token(
                self.email, password
            )
            self._set_auth_token(auth_token)

        self._db_path = db_path or os.path.join(self.secrets_dir, "db.sqlite")
        self.etesync = etesync.EteSync(
            email, auth_token, remote=server_url, db_path=self._db_path
        )

        key = self._get_key()
        if not key:
            password = click.prompt("Enter key password", hide_input=True)
            click.echo(f"Deriving key for {self.email}")
            self.etesync.derive_key(password)
            self._set_key(self.etesync.cipher_key)
        else:
            self.etesync.cipher_key = key

    def _get_auth_token(self):
        try:
            with open(self._auth_token_path) as f:
                return f.read().strip() or None
        except OSError:
            pass

    def _set_auth_token(self, token):
        checkdir(os.path.dirname(self._auth_token_path), create=True)
        with atomicwrites.atomic_write(self._auth_token_path) as f:
            f.write(token)
        assert_permissions(self._auth_token_path, 0o600)

    def _get_key(self):
        try:
            with open(self._key_path, "rb") as f:
                return f.read()
        except OSError:
            pass

    def _set_key(self, content):
        checkdir(os.path.dirname(self._key_path), create=True)
        with atomicwrites.atomic_write(self._key_path, mode="wb") as f:
            f.write(content)
        assert_permissions(self._key_path, 0o600)


class EtesyncStorage(Storage):
    _collection_type = None
    _item_type = None
    _at_once = False

    def __init__(self, email, secrets_dir, server_url=None, db_path=None, **kwargs):
        if kwargs.get("collection", None) is None:
            raise ValueError("Collection argument required")

        self._session = _Session(email, secrets_dir, server_url, db_path)
        super().__init__(**kwargs)
        self._journal = self._session.etesync.get(self.collection)

    def _sync_journal(self):
        self._session.etesync.sync_journal(self.collection)

    @classmethod
    def discover(cls, email, secrets_dir, server_url=None, db_path=None, **kwargs):
        if kwargs.get("collection", None) is not None:
            raise TypeError("collection argument must not be given.")
        session = _Session(email, secrets_dir, server_url, db_path)
        assert cls._collection_type
        session.etesync.sync_journal_list()
        for entry in session.etesync.list():
            if isinstance(entry.collection, cls._collection_type):
                yield dict(
                    email=email,
                    secrets_dir=secrets_dir,
                    db_path=db_path,
                    collection=entry.uid,
                    **kwargs,
                )
            else:
                logger.debug(f"Skipping collection: {entry!r}")

    @classmethod
    def create_collection(
        cls, collection, email, secrets_dir, server_url=None, db_path=None, **kwargs
    ):
        session = _Session(email, secrets_dir, server_url, db_path)
        content = {"displayName": collection}
        c = cls._collection_type.create(
            session.etesync, binascii.hexlify(os.urandom(32)).decode(), content
        )
        c.save()
        session.etesync.sync_journal_list()
        return dict(
            collection=c.journal.uid,
            email=email,
            secrets_dir=secrets_dir,
            db_path=db_path,
            server_url=server_url,
            **kwargs,
        )

    def list(self):
        self._sync_journal()
        for entry in self._journal.collection.list():
            item = Item(entry.content)
            yield str(entry.uid), item.hash

    def get(self, href):
        try:
            item = Item(self._journal.collection.get(href).content)
        except etesync.exceptions.DoesNotExist as e:
            raise exceptions.NotFoundError(e)
        return item, item.hash

    @_writing_op
    def upload(self, item):
        try:
            entry = self._item_type.create(self._journal.collection, item.raw)
            entry.save()
        except etesync.exceptions.DoesNotExist as e:
            raise exceptions.NotFoundError(e)
        except etesync.exceptions.AlreadyExists as e:
            raise exceptions.AlreadyExistingError(e)
        return item.uid, item.hash

    @_writing_op
    def update(self, href, item, etag):
        try:
            entry = self._journal.collection.get(href)
        except etesync.exceptions.DoesNotExist as e:
            raise exceptions.NotFoundError(e)

        old_item = Item(entry.content)
        if old_item.hash != etag:
            raise exceptions.WrongEtagError(etag, old_item.hash)

        entry.content = item.raw
        entry.save()
        return item.hash

    @_writing_op
    def delete(self, href, etag):
        try:
            entry = self._journal.collection.get(href)
            old_item = Item(entry.content)
            if old_item.hash != etag:
                raise exceptions.WrongEtagError(etag, old_item.hash)
            entry.delete()
        except etesync.exceptions.DoesNotExist as e:
            raise exceptions.NotFoundError(e)

    @contextlib.contextmanager
    def at_once(self):
        self._sync_journal()
        self._at_once = True
        try:
            yield self
            self._sync_journal()
        finally:
            self._at_once = False


class EtesyncContacts(EtesyncStorage):
    _collection_type = AddressBook
    _item_type = Contact
    storage_name = "etesync_contacts"


class EtesyncCalendars(EtesyncStorage):
    _collection_type = Calendar
    _item_type = Event
    storage_name = "etesync_calendars"

vdirsyncer-0.18.0/vdirsyncer/storage/filesystem.py

import errno
import logging
import os
import subprocess

from atomicwrites import atomic_write

from .. import exceptions
from ..utils import checkdir
from ..utils import expand_path
from ..utils import generate_href
from ..utils import get_etag_from_file
from ..vobject import Item
from .base import normalize_meta_value
from .base import Storage

logger = logging.getLogger(__name__)


class FilesystemStorage(Storage):
    storage_name = "filesystem"
    _repr_attributes = ("path",)

    def __init__(
        self,
        path,
        fileext,
        encoding="utf-8",
        post_hook=None,
        fileignoreext=".tmp",
        **kwargs,
    ):
        super().__init__(**kwargs)
        path = expand_path(path)
        checkdir(path, create=False)
        self.path = path
        self.encoding = encoding
        self.fileext = fileext
        self.fileignoreext = fileignoreext
        self.post_hook = post_hook

    @classmethod
    def discover(cls, path, **kwargs):
        if kwargs.pop("collection", None) is not None:
            raise TypeError("collection argument must not be given.")
        path = expand_path(path)
        try:
            collections = os.listdir(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
        else:
            for collection in collections:
                collection_path = os.path.join(path, collection)
                if not cls._validate_collection(collection_path):
                    continue
                args = dict(collection=collection, path=collection_path, **kwargs)
                yield args

    @classmethod
    def _validate_collection(cls, path):
        if not os.path.isdir(path):
            return False
        if os.path.basename(path).startswith("."):
            return False
        return True

    @classmethod
    def create_collection(cls, collection, **kwargs):
        kwargs = dict(kwargs)
        path = kwargs["path"]

        if collection is not None:
            path = os.path.join(path, collection)

        checkdir(expand_path(path), create=True)

        kwargs["path"] = path
        kwargs["collection"] = collection
        return kwargs

    def _get_filepath(self, href):
        return os.path.join(self.path, href)

    def _get_href(self, ident):
        return generate_href(ident) + self.fileext

    def list(self):
        for fname in os.listdir(self.path):
            fpath = os.path.join(self.path, fname)
            if (
                os.path.isfile(fpath)
                and fname.endswith(self.fileext)
                and (not fname.endswith(self.fileignoreext))
            ):
                yield fname, get_etag_from_file(fpath)

    def get(self, href):
        fpath = self._get_filepath(href)
        try:
            with open(fpath, "rb") as f:
                return (Item(f.read().decode(self.encoding)), get_etag_from_file(fpath))
        except OSError as e:
            if e.errno == errno.ENOENT:
                raise exceptions.NotFoundError(href)
            else:
                raise

    def upload(self, item):
        if not isinstance(item.raw, str):
            raise TypeError("item.raw must be a unicode string.")

        try:
            href = self._get_href(item.ident)
            fpath, etag = self._upload_impl(item, href)
        except OSError as e:
            if e.errno in (
                errno.ENAMETOOLONG,  # Unix
                errno.ENOENT,  # Windows
            ):
                logger.debug("UID as filename rejected, trying with random one.")
                # random href instead of UID-based
                href = self._get_href(None)
                fpath, etag = self._upload_impl(item, href)
            else:
                raise

        if self.post_hook:
            self._run_post_hook(fpath)
        return href, etag

    def _upload_impl(self, item, href):
        fpath = self._get_filepath(href)
        try:
            with atomic_write(fpath, mode="wb", overwrite=False) as f:
                f.write(item.raw.encode(self.encoding))
                return fpath, get_etag_from_file(f)
        except OSError as e:
            if e.errno == errno.EEXIST:
                raise exceptions.AlreadyExistingError(existing_href=href)
            else:
                raise

    def update(self, href, item, etag):
        fpath = self._get_filepath(href)
        if not os.path.exists(fpath):
            raise exceptions.NotFoundError(item.uid)
        actual_etag = get_etag_from_file(fpath)
        if etag != actual_etag:
            raise exceptions.WrongEtagError(etag, actual_etag)

        if not isinstance(item.raw, str):
            raise TypeError("item.raw must be a unicode string.")

        with atomic_write(fpath, mode="wb", overwrite=True) as f:
            f.write(item.raw.encode(self.encoding))
            etag = get_etag_from_file(f)

        if self.post_hook:
            self._run_post_hook(fpath)
        return etag

    def delete(self, href, etag):
        fpath = self._get_filepath(href)
        if not os.path.isfile(fpath):
            raise exceptions.NotFoundError(href)
        actual_etag = get_etag_from_file(fpath)
        if etag != actual_etag:
            raise exceptions.WrongEtagError(etag, actual_etag)
        os.remove(fpath)

    def _run_post_hook(self, fpath):
        logger.info(
            "Calling post_hook={} with argument={}".format(self.post_hook, fpath)
        )
        try:
            subprocess.call([self.post_hook, fpath])
        except OSError as e:
            logger.warning("Error executing external hook: {}".format(str(e)))

    def get_meta(self, key):
        fpath = os.path.join(self.path, key)
        try:
            with open(fpath, "rb") as f:
                return normalize_meta_value(f.read().decode(self.encoding))
        except OSError as e:
            if e.errno == errno.ENOENT:
                return ""
            else:
                raise

    def set_meta(self, key, value):
        value = normalize_meta_value(value)
        fpath = os.path.join(self.path, key)
        with atomic_write(fpath, mode="wb", overwrite=True) as f:
            f.write(value.encode(self.encoding))
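
# Editor's illustrative sketch, not part of the original source: a
# FilesystemStorage round-trip against a throwaway vdir directory.
def _example_filesystem_roundtrip():
    import tempfile

    storage = FilesystemStorage(path=tempfile.mkdtemp(), fileext=".ics")
    href, etag = storage.upload(Item("BEGIN:VCALENDAR\r\nEND:VCALENDAR"))
    # list() reports the same etag that upload computed from the file.
    assert dict(storage.list())[href] == etag
    return storage.get(href)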
access_type="offline", approval_prompt="force", ) click.echo(f"Opening {authorization_url} ...") try: open_graphical_browser(authorization_url) except Exception as e: logger.warning(str(e)) click.echo("Follow the instructions on the page.") code = click.prompt("Paste obtained code") token = self._session.fetch_token( REFRESH_URL, code=code, # Google specific extra parameter used for client # authentication client_secret=client_secret, ) # FIXME: Ugly _save_token(token) class GoogleCalendarStorage(dav.CalDAVStorage): class session_class(GoogleSession): url = "https://apidata.googleusercontent.com/caldav/v2/" scope = ["https://www.googleapis.com/auth/calendar"] class discovery_class(dav.CalDiscover): @staticmethod def _get_collection_from_url(url): # Google CalDAV has collection URLs like: # /user/foouser/calendars/foocalendar/events/ parts = url.rstrip("/").split("/") parts.pop() collection = parts.pop() return urlparse.unquote(collection) storage_name = "google_calendar" def __init__( self, token_file, client_id, client_secret, start_date=None, end_date=None, item_types=(), **kwargs, ): if not kwargs.get("collection"): raise exceptions.CollectionRequired() super().__init__( token_file=token_file, client_id=client_id, client_secret=client_secret, start_date=start_date, end_date=end_date, item_types=item_types, **kwargs, ) # This is ugly: We define/override the entire signature computed for the # docs here because the current way we autogenerate those docs are too # simple for our advanced argspec juggling in `vdirsyncer.storage.dav`. __init__._traverse_superclass = base.Storage class GoogleContactsStorage(dav.CardDAVStorage): class session_class(GoogleSession): # Google CardDAV is completely bonkers. Collection discovery doesn't # work properly, well-known URI takes us directly to single collection # from where we can't discover principal or homeset URIs (the PROPFINDs # 404). # # So we configure the well-known URI here again, such that discovery # tries collection enumeration on it directly. That appears to work. url = "https://www.googleapis.com/.well-known/carddav" scope = ["https://www.googleapis.com/auth/carddav"] class discovery_class(dav.CardDAVStorage.discovery_class): # Google CardDAV doesn't return any resourcetype prop. _resourcetype = None storage_name = "google_contacts" def __init__(self, token_file, client_id, client_secret, **kwargs): if not kwargs.get("collection"): raise exceptions.CollectionRequired() super().__init__( token_file=token_file, client_id=client_id, client_secret=client_secret, **kwargs, ) # This is ugly: We define/override the entire signature computed for the # docs here because the current way we autogenerate those docs are too # simple for our advanced argspec juggling in `vdirsyncer.storage.dav`. __init__._traverse_superclass = base.Storage vdirsyncer-0.18.0/vdirsyncer/storage/http.py000066400000000000000000000040041406140636100211540ustar00rootroot00000000000000import urllib.parse as urlparse from .. import exceptions from ..http import prepare_auth from ..http import prepare_client_cert from ..http import prepare_verify from ..http import request from ..http import USERAGENT from ..vobject import Item from ..vobject import split_collection from .base import Storage class HttpStorage(Storage): storage_name = "http" read_only = True _repr_attributes = ("username", "url") _items = None # Required for tests. 

vdirsyncer-0.18.0/vdirsyncer/storage/http.py

import urllib.parse as urlparse

from .. import exceptions
from ..http import prepare_auth
from ..http import prepare_client_cert
from ..http import prepare_verify
from ..http import request
from ..http import USERAGENT
from ..vobject import Item
from ..vobject import split_collection
from .base import Storage


class HttpStorage(Storage):
    storage_name = "http"
    read_only = True
    _repr_attributes = ("username", "url")
    _items = None

    # Required for tests.
    _ignore_uids = True

    def __init__(
        self,
        url,
        username="",
        password="",
        verify=True,
        auth=None,
        useragent=USERAGENT,
        verify_fingerprint=None,
        auth_cert=None,
        **kwargs,
    ):
        super().__init__(**kwargs)

        self._settings = {
            "auth": prepare_auth(auth, username, password),
            "cert": prepare_client_cert(auth_cert),
            "latin1_fallback": False,
        }
        self._settings.update(prepare_verify(verify, verify_fingerprint))
        self.username, self.password = username, password
        self.useragent = useragent

        collection = kwargs.get("collection")
        if collection is not None:
            url = urlparse.urljoin(url, collection)
        self.url = url
        self.parsed_url = urlparse.urlparse(self.url)

    def _default_headers(self):
        return {"User-Agent": self.useragent}

    def list(self):
        r = request("GET", self.url, headers=self._default_headers(), **self._settings)
        self._items = {}

        for item in split_collection(r.text):
            item = Item(item)
            if self._ignore_uids:
                item = item.with_uid(item.hash)

            self._items[item.ident] = item, item.hash

        return ((href, etag) for href, (item, etag) in self._items.items())

    def get(self, href):
        if self._items is None:
            self.list()

        try:
            return self._items[href]
        except KeyError:
            raise exceptions.NotFoundError(href)


vdirsyncer-0.18.0/vdirsyncer/storage/memory.py

import random

from .. import exceptions
from .base import normalize_meta_value
from .base import Storage


def _random_string():
    return f"{random.random():.9f}"


class MemoryStorage(Storage):
    storage_name = "memory"

    """
    Saves data in RAM, only useful for testing.
    """

    def __init__(self, fileext="", **kwargs):
        if kwargs.get("collection") is not None:
            raise exceptions.UserError("MemoryStorage does not support collections.")
        self.items = {}  # href => (etag, item)
        self.metadata = {}
        self.fileext = fileext
        super().__init__(**kwargs)

    def _get_href(self, item):
        return item.ident + self.fileext

    def list(self):
        for href, (etag, _item) in self.items.items():
            yield href, etag

    def get(self, href):
        etag, item = self.items[href]
        return item, etag

    def has(self, href):
        return href in self.items

    def upload(self, item):
        href = self._get_href(item)
        if href in self.items:
            raise exceptions.AlreadyExistingError(existing_href=href)
        etag = _random_string()
        self.items[href] = (etag, item)
        return href, etag

    def update(self, href, item, etag):
        if href not in self.items:
            raise exceptions.NotFoundError(href)
        actual_etag, _ = self.items[href]
        if etag != actual_etag:
            raise exceptions.WrongEtagError(etag, actual_etag)

        new_etag = _random_string()
        self.items[href] = (new_etag, item)
        return new_etag

    def delete(self, href, etag):
        if not self.has(href):
            raise exceptions.NotFoundError(href)
        if etag != self.items[href][0]:
            raise exceptions.WrongEtagError(etag)
        del self.items[href]

    def get_meta(self, key):
        return normalize_meta_value(self.metadata.get(key))

    def set_meta(self, key, value):
        self.metadata[key] = normalize_meta_value(value)
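
# Editor's illustrative sketch, not part of the original source: exercising
# the write API that MemoryStorage implements.
def _example_memory_roundtrip():
    from ..vobject import Item

    s = MemoryStorage()
    href, etag = s.upload(Item("BEGIN:VCARD\r\nUID:example\r\nEND:VCARD"))
    # update requires the current etag and hands back a new one.
    etag = s.update(href, Item("BEGIN:VCARD\r\nUID:example\r\nFN:X\r\nEND:VCARD"), etag)
    assert s.get(href)[1] == etag
    return s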

vdirsyncer-0.18.0/vdirsyncer/storage/singlefile.py

import collections
import contextlib
import functools
import glob
import logging
import os

from atomicwrites import atomic_write

from .. import exceptions
from ..utils import checkfile
from ..utils import expand_path
from ..utils import get_etag_from_file
from ..vobject import Item
from ..vobject import join_collection
from ..vobject import split_collection
from .base import Storage

logger = logging.getLogger(__name__)


def _writing_op(f):
    @functools.wraps(f)
    def inner(self, *args, **kwargs):
        if self._items is None or not self._at_once:
            self.list()

        rv = f(self, *args, **kwargs)

        if not self._at_once:
            self._write()

        return rv

    return inner


class SingleFileStorage(Storage):
    storage_name = "singlefile"
    _repr_attributes = ("path",)

    _write_mode = "wb"
    _append_mode = "ab"
    _read_mode = "rb"

    _items = None
    _last_etag = None

    def __init__(self, path, encoding="utf-8", **kwargs):
        super().__init__(**kwargs)
        path = os.path.abspath(expand_path(path))
        checkfile(path, create=False)

        self.path = path
        self.encoding = encoding
        self._at_once = False

    @classmethod
    def discover(cls, path, **kwargs):
        if kwargs.pop("collection", None) is not None:
            raise TypeError("collection argument must not be given.")
        path = os.path.abspath(expand_path(path))
        try:
            path_glob = path % "*"
        except TypeError:
            # If not exactly one '%s' is present, we cannot discover
            # collections because we wouldn't know which name to assign.
            raise NotImplementedError()

        placeholder_pos = path.index("%s")

        for subpath in glob.iglob(path_glob):
            if os.path.isfile(subpath):
                args = dict(kwargs)
                args["path"] = subpath

                collection_end = (
                    placeholder_pos
                    + 2  # length of '%s'
                    + len(subpath)
                    - len(path)
                )
                collection = subpath[placeholder_pos:collection_end]
                args["collection"] = collection

                yield args

    @classmethod
    def create_collection(cls, collection, **kwargs):
        path = os.path.abspath(expand_path(kwargs["path"]))

        if collection is not None:
            try:
                path = path % (collection,)
            except TypeError:
                raise ValueError(
                    "Exactly one %s required in path if collection is not null."
                )

        checkfile(path, create=True)
        kwargs["path"] = path
        kwargs["collection"] = collection
        return kwargs

    def list(self):
        self._items = collections.OrderedDict()

        try:
            self._last_etag = get_etag_from_file(self.path)
            with open(self.path, self._read_mode) as f:
                text = f.read().decode(self.encoding)
        except OSError as e:
            import errno

            if e.errno != errno.ENOENT:  # file not found
                raise OSError(e)
            text = None

        if not text:
            return ()

        for item in split_collection(text):
            item = Item(item)
            etag = item.hash
            self._items[item.ident] = item, etag

        return ((href, etag) for href, (item, etag) in self._items.items())

    def get(self, href):
        if self._items is None or not self._at_once:
            self.list()

        try:
            return self._items[href]
        except KeyError:
            raise exceptions.NotFoundError(href)

    @_writing_op
    def upload(self, item):
        href = item.ident
        if href in self._items:
            raise exceptions.AlreadyExistingError(existing_href=href)

        self._items[href] = item, item.hash
        return href, item.hash

    @_writing_op
    def update(self, href, item, etag):
        if href not in self._items:
            raise exceptions.NotFoundError(href)

        _, actual_etag = self._items[href]
        if etag != actual_etag:
            raise exceptions.WrongEtagError(etag, actual_etag)

        self._items[href] = item, item.hash
        return item.hash

    @_writing_op
    def delete(self, href, etag):
        if href not in self._items:
            raise exceptions.NotFoundError(href)

        _, actual_etag = self._items[href]
        if etag != actual_etag:
            raise exceptions.WrongEtagError(etag, actual_etag)

        del self._items[href]

    def _write(self):
        if self._last_etag is not None and self._last_etag != get_etag_from_file(
            self.path
        ):
            raise exceptions.PreconditionFailed(
                (
                    "Some other program modified the file {!r}.
Re-run the " "synchronization and make sure absolutely no other program is " "writing into the same file." ).format(self.path) ) text = join_collection(item.raw for item, etag in self._items.values()) try: with atomic_write(self.path, mode="wb", overwrite=True) as f: f.write(text.encode(self.encoding)) finally: self._items = None self._last_etag = None @contextlib.contextmanager def at_once(self): self.list() self._at_once = True try: yield self self._write() finally: self._at_once = False vdirsyncer-0.18.0/vdirsyncer/sync/000077500000000000000000000000001406140636100171355ustar00rootroot00000000000000vdirsyncer-0.18.0/vdirsyncer/sync/__init__.py000066400000000000000000000257721406140636100212630ustar00rootroot00000000000000""" The `sync` function in `vdirsyncer.sync` can be called on two instances of `Storage` to synchronize them. Apart from the defined errors, this is the only public API of this module. The algorithm is based on the blogpost "How OfflineIMAP works" by Edward Z. Yang: http://blog.ezyang.com/2012/08/how-offlineimap-works/ Some modifications to it are explained in https://unterwaditzer.net/2016/sync-algorithm.html """ import contextlib import itertools import logging from ..exceptions import UserError from ..utils import uniq from .exceptions import BothReadOnly from .exceptions import IdentAlreadyExists from .exceptions import PartialSync from .exceptions import StorageEmpty from .exceptions import SyncConflict from .status import ItemMetadata from .status import SubStatus sync_logger = logging.getLogger(__name__) class _StorageInfo: """A wrapper class that holds prefetched items, the status and other things.""" def __init__(self, storage, status): self.storage = storage self.status = status self._item_cache = {} def prepare_new_status(self): storage_nonempty = False prefetch = [] def _store_props(ident, props): try: self.status.insert_ident(ident, props) except IdentAlreadyExists as e: raise e.to_ident_conflict(self.storage) for href, etag in self.storage.list(): storage_nonempty = True ident, meta = self.status.get_by_href(href) if meta is None or meta.href != href or meta.etag != etag: # Either the item is completely new, or updated # In both cases we should prefetch prefetch.append(href) else: # Metadata is completely identical _store_props(ident, meta) # Prefetch items for href, item, etag in self.storage.get_multi(prefetch) if prefetch else (): _store_props(item.ident, ItemMetadata(href=href, hash=item.hash, etag=etag)) self.set_item_cache(item.ident, item) return storage_nonempty def is_changed(self, ident): old_meta = self.status.get(ident) if old_meta is None: # new item return True new_meta = self.status.get_new(ident) return ( new_meta.etag != old_meta.etag # etag changed # item actually changed and (old_meta.hash is None or new_meta.hash != old_meta.hash) ) def set_item_cache(self, ident, item): actual_hash = self.status.get_new(ident).hash assert actual_hash == item.hash self._item_cache[ident] = item def get_item_cache(self, ident): return self._item_cache[ident] def sync( storage_a, storage_b, status, conflict_resolution=None, force_delete=False, error_callback=None, partial_sync="revert", ): """Synchronizes two storages. :param storage_a: The first storage :type storage_a: :class:`vdirsyncer.storage.base.Storage` :param storage_b: The second storage :type storage_b: :class:`vdirsyncer.storage.base.Storage` :param status: {ident: (href_a, etag_a, href_b, etag_b)} metadata about the two storages for detection of changes. 

def sync(
    storage_a,
    storage_b,
    status,
    conflict_resolution=None,
    force_delete=False,
    error_callback=None,
    partial_sync="revert",
):
    """Synchronizes two storages.

    :param storage_a: The first storage
    :type storage_a: :class:`vdirsyncer.storage.base.Storage`
    :param storage_b: The second storage
    :type storage_b: :class:`vdirsyncer.storage.base.Storage`
    :param status: {ident: (href_a, etag_a, href_b, etag_b)}
        metadata about the two storages for detection of changes. Will be
        modified by the function and should be passed to it at the next sync.
        If this is the first sync, an empty dictionary should be provided.
    :param conflict_resolution: A function that, given two conflicting item
        versions A and B, returns a new item with conflicts resolved. The UID
        must be the same. The strings `"a wins"` and `"b wins"` are also
        accepted to mean that side's version will always be taken. If none
        is provided, the sync function will raise :py:exc:`SyncConflict`.
    :param force_delete: When one storage got completely emptied between two
        syncs, :py:exc:`StorageEmpty` is raised for safety. Setting this
        parameter to ``True`` disables this safety measure.
    :param error_callback: Instead of raising errors when executing actions,
        call the given function with an `Exception` as the only argument.
    :param partial_sync: What to do when doing sync actions on read-only
        storages.

        - ``error``: Raise an error.
        - ``ignore``: Those actions are simply skipped.
        - ``revert`` (default): Revert changes on other side.
    """
    if storage_a.read_only and storage_b.read_only:
        raise BothReadOnly()

    if conflict_resolution == "a wins":
        conflict_resolution = lambda a, b: a  # noqa: E731
    elif conflict_resolution == "b wins":
        conflict_resolution = lambda a, b: b  # noqa: E731

    status_nonempty = bool(next(status.iter_old(), None))

    with status.transaction():
        a_info = _StorageInfo(storage_a, SubStatus(status, "a"))
        b_info = _StorageInfo(storage_b, SubStatus(status, "b"))

        a_nonempty = a_info.prepare_new_status()
        b_nonempty = b_info.prepare_new_status()

        if status_nonempty and not force_delete:
            if a_nonempty and not b_nonempty:
                raise StorageEmpty(empty_storage=storage_b)
            elif not a_nonempty and b_nonempty:
                raise StorageEmpty(empty_storage=storage_a)

        actions = list(_get_actions(a_info, b_info))

        with storage_a.at_once(), storage_b.at_once():
            for action in actions:
                try:
                    action.run(a_info, b_info, conflict_resolution, partial_sync)
                except Exception as e:
                    if error_callback:
                        error_callback(e)
                    else:
                        raise


class Action:
    def _run_impl(self, a, b):  # pragma: no cover
        raise NotImplementedError()

    def run(self, a, b, conflict_resolution, partial_sync):
        with self.auto_rollback(a, b):
            if self.dest.storage.read_only:
                if partial_sync == "error":
                    raise PartialSync(self.dest.storage)
                elif partial_sync == "ignore":
                    self.rollback(a, b)
                    return
                else:
                    assert partial_sync == "revert"

            self._run_impl(a, b)

    @contextlib.contextmanager
    def auto_rollback(self, a, b):
        try:
            yield
        except BaseException as e:
            self.rollback(a, b)
            raise e

    def rollback(self, a, b):
        a.status.parent.rollback(self.ident)


class Upload(Action):
    def __init__(self, item, dest):
        self.item = item
        self.ident = item.ident
        self.dest = dest

    def _run_impl(self, a, b):
        if self.dest.storage.read_only:
            href = etag = None
        else:
            sync_logger.info(
                "Copying (uploading) item {} to {}".format(
                    self.ident, self.dest.storage
                )
            )
            href, etag = self.dest.storage.upload(self.item)
            assert href is not None

        self.dest.status.insert_ident(
            self.ident, ItemMetadata(href=href, hash=self.item.hash, etag=etag)
        )


class Update(Action):
    def __init__(self, item, dest):
        self.item = item
        self.ident = item.ident
        self.dest = dest

    def _run_impl(self, a, b):
        if self.dest.storage.read_only:
            meta = ItemMetadata(hash=self.item.hash)
        else:
            sync_logger.info(
                "Copying (updating) item {} to {}".format(self.ident, self.dest.storage)
            )
            meta = self.dest.status.get_new(self.ident)
            meta.etag = self.dest.storage.update(meta.href, self.item, meta.etag)

        self.dest.status.update_ident(self.ident, meta)

class Delete(Action):
    def __init__(self, ident, dest):
        self.ident = ident
        self.dest = dest

    def _run_impl(self, a, b):
        meta = self.dest.status.get_new(self.ident)
        if not self.dest.storage.read_only:
            sync_logger.info(
                "Deleting item {} from {}".format(self.ident, self.dest.storage)
            )
            self.dest.storage.delete(meta.href, meta.etag)

        self.dest.status.remove_ident(self.ident)


class ResolveConflict(Action):
    def __init__(self, ident):
        self.ident = ident

    def run(self, a, b, conflict_resolution, partial_sync):
        with self.auto_rollback(a, b):
            sync_logger.info(
                "Doing conflict resolution for item {}...".format(self.ident)
            )

            meta_a = a.status.get_new(self.ident)
            meta_b = b.status.get_new(self.ident)

            if meta_a.hash == meta_b.hash:
                sync_logger.info("...same content on both sides.")
            elif conflict_resolution is None:
                raise SyncConflict(
                    ident=self.ident, href_a=meta_a.href, href_b=meta_b.href
                )
            elif callable(conflict_resolution):
                item_a = a.get_item_cache(self.ident)
                item_b = b.get_item_cache(self.ident)
                new_item = conflict_resolution(item_a, item_b)
                if new_item.hash != meta_a.hash:
                    Update(new_item, a).run(a, b, conflict_resolution, partial_sync)
                if new_item.hash != meta_b.hash:
                    Update(new_item, b).run(a, b, conflict_resolution, partial_sync)
            else:
                raise UserError(
                    "Invalid conflict resolution mode: {!r}".format(conflict_resolution)
                )


def _get_actions(a_info, b_info):
    for ident in uniq(
        itertools.chain(
            a_info.status.parent.iter_new(), a_info.status.parent.iter_old()
        )
    ):
        a = a_info.status.get_new(ident)
        b = b_info.status.get_new(ident)

        if a and b:
            a_changed = a_info.is_changed(ident)
            b_changed = b_info.is_changed(ident)
            if a_changed and b_changed:
                # item was modified on both sides
                # OR: missing status
                yield ResolveConflict(ident)
            elif a_changed and not b_changed:
                # item was only modified in a
                yield Update(a_info.get_item_cache(ident), b_info)
            elif not a_changed and b_changed:
                # item was only modified in b
                yield Update(b_info.get_item_cache(ident), a_info)
        elif a and not b:
            if a_info.is_changed(ident):
                # was deleted from b but modified on a
                # OR: new item was created in a
                yield Upload(a_info.get_item_cache(ident), b_info)
            else:
                # was deleted from b and not modified on a
                yield Delete(ident, a_info)
        elif not a and b:
            if b_info.is_changed(ident):
                # was deleted from a but modified on b
                # OR: new item was created in b
                yield Upload(b_info.get_item_cache(ident), a_info)
            else:
                # was deleted from a and not changed on b
                yield Delete(ident, b_info)
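
# Editor's illustrative sketch, not part of the original source: a custom
# ``conflict_resolution`` callable as accepted by ``sync`` above. Both inputs
# share the same UID; the returned item wins on both sides.
def _example_conflict_resolution(item_a, item_b):
    # Arbitrary policy for illustration: prefer the longer raw representation.
    return item_a if len(item_a.raw) >= len(item_b.raw) else item_b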

vdirsyncer-0.18.0/vdirsyncer/sync/exceptions.py

from .. import exceptions


class SyncError(exceptions.Error):
    """Errors related to synchronization."""


class SyncConflict(SyncError):
    """
    Two items changed since the last sync, they now have different contents
    and no conflict resolution method was given.

    :param ident: The ident of the item.
    :param href_a: The item's href on side A.
    :param href_b: The item's href on side B.
    """

    ident = None
    href_a = None
    href_b = None


class IdentConflict(SyncError):
    """
    Multiple items on the same storage have the same UID.

    :param storage: The affected storage.
    :param hrefs: List of affected hrefs on `storage`.
    """

    storage = None
    _hrefs = None

    @property
    def hrefs(self):
        return self._hrefs

    @hrefs.setter
    def hrefs(self, val):
        new_val = set(val)
        assert len(new_val) > 1, val
        self._hrefs = new_val


class StorageEmpty(SyncError):
    """
    One storage unexpectedly got completely empty between two synchronizations.
    The first argument is the empty storage.

    :param empty_storage: The empty
        :py:class:`vdirsyncer.storage.base.Storage`.
    """

    empty_storage = None


class BothReadOnly(SyncError):
    """
    Both storages are marked as read-only. Synchronization is therefore not
    possible.
    """


class PartialSync(SyncError):
    """
    Attempted change on read-only storage.
    """

    storage = None


class IdentAlreadyExists(SyncError):
    """Like IdentConflict, but for internal state. If this bubbles up, we
    don't have a data race, but a bug."""

    old_href = None
    new_href = None

    def to_ident_conflict(self, storage):
        return IdentConflict(storage=storage, hrefs=[self.old_href, self.new_href])


vdirsyncer-0.18.0/vdirsyncer/sync/status.py

import abc
import contextlib
import sqlite3
import sys

from .exceptions import IdentAlreadyExists


@contextlib.contextmanager
def _exclusive_transaction(conn):
    c = None
    try:
        c = conn.execute("BEGIN EXCLUSIVE TRANSACTION")
        yield c
        c.execute("COMMIT")
    except BaseException:
        if c is None:
            raise
        _, e, tb = sys.exc_info()
        c.execute("ROLLBACK")
        raise e.with_traceback(tb)


class _StatusBase(metaclass=abc.ABCMeta):
    def load_legacy_status(self, status):
        with self.transaction():
            for ident, metadata in status.items():
                if len(metadata) == 4:
                    href_a, etag_a, href_b, etag_b = metadata
                    props_a = ItemMetadata(href=href_a, hash="UNDEFINED", etag=etag_a)
                    props_b = ItemMetadata(href=href_b, hash="UNDEFINED", etag=etag_b)
                else:
                    a, b = metadata
                    a.setdefault("hash", "UNDEFINED")
                    b.setdefault("hash", "UNDEFINED")
                    props_a = ItemMetadata(**a)
                    props_b = ItemMetadata(**b)

                self.insert_ident_a(ident, props_a)
                self.insert_ident_b(ident, props_b)

    def to_legacy_status(self):
        for ident in self.iter_old():
            a = self.get_a(ident)
            b = self.get_b(ident)
            yield ident, (a.to_status(), b.to_status())

    @abc.abstractmethod
    def transaction(self):
        raise NotImplementedError()

    @abc.abstractmethod
    def insert_ident_a(self, ident, props):
        raise NotImplementedError()

    @abc.abstractmethod
    def insert_ident_b(self, ident, props):
        raise NotImplementedError()

    @abc.abstractmethod
    def update_ident_a(self, ident, props):
        raise NotImplementedError()

    @abc.abstractmethod
    def update_ident_b(self, ident, props):
        raise NotImplementedError()

    @abc.abstractmethod
    def remove_ident(self, ident):
        raise NotImplementedError()

    @abc.abstractmethod
    def get_a(self, ident):
        raise NotImplementedError()

    @abc.abstractmethod
    def get_b(self, ident):
        raise NotImplementedError()

    @abc.abstractmethod
    def get_new_a(self, ident):
        raise NotImplementedError()

    @abc.abstractmethod
    def get_new_b(self, ident):
        raise NotImplementedError()

    @abc.abstractmethod
    def iter_old(self):
        raise NotImplementedError()

    @abc.abstractmethod
    def iter_new(self):
        raise NotImplementedError()

    @abc.abstractmethod
    def get_by_href_a(self, href, default=(None, None)):
        raise NotImplementedError()

    @abc.abstractmethod
    def get_by_href_b(self, href, default=(None, None)):
        raise NotImplementedError()

    @abc.abstractmethod
    def rollback(self, ident):
        raise NotImplementedError()


class SqliteStatus(_StatusBase):
    SCHEMA_VERSION = 1

    def __init__(self, path=":memory:"):
        self._path = path
        self._c = sqlite3.connect(path)
        self._c.isolation_level = None  # turn off idiocy of DB-API
        self._c.row_factory = sqlite3.Row
        self._update_schema()

    def _update_schema(self):
        if self._is_latest_version():
            return

        # If we ever bump the schema version, we will need a way to migrate
        # data.
        with _exclusive_transaction(self._c) as c:
            c.execute('CREATE TABLE meta ( "version" INTEGER PRIMARY KEY )')
            c.execute("INSERT INTO meta (version) VALUES (?)", (self.SCHEMA_VERSION,))

            # I know that this is a bad schema, but right there is just too
            # little gain in deduplicating the .._a and .._b columns.
            c.execute(
                """CREATE TABLE status (
                    "ident" TEXT PRIMARY KEY NOT NULL,
                    "href_a" TEXT,
                    "href_b" TEXT,
                    "hash_a" TEXT NOT NULL,
                    "hash_b" TEXT NOT NULL,
                    "etag_a" TEXT,
                    "etag_b" TEXT
                );
                """
            )
            c.execute("CREATE UNIQUE INDEX by_href_a ON status(href_a)")
            c.execute("CREATE UNIQUE INDEX by_href_b ON status(href_b)")

            # We cannot add NOT NULL here because data is first fetched for the
            # storage a, then storage b. Inbetween the `_b`-columns are filled
            # with NULL.
            #
            # In an ideal world we would be able to start a transaction with
            # one cursor, write our new data into status and simultaneously
            # query the old status data using a different cursor.
            # Unfortunately sqlite enforces NOT NULL constraints immediately,
            # not just at commit. Since there is also no way to alter
            # constraints on a table (disable constraints on start of
            # transaction and reenable on end), it's a separate table now that
            # just gets copied over before we commit. That's a lot of copying,
            # sadly.
            c.execute(
                """CREATE TABLE new_status (
                    "ident" TEXT PRIMARY KEY NOT NULL,
                    "href_a" TEXT,
                    "href_b" TEXT,
                    "hash_a" TEXT,
                    "hash_b" TEXT,
                    "etag_a" TEXT,
                    "etag_b" TEXT
                );
                """
            )

    def _is_latest_version(self):
        try:
            return bool(
                self._c.execute(
                    "SELECT version FROM meta WHERE version = ?", (self.SCHEMA_VERSION,)
                ).fetchone()
            )
        except sqlite3.OperationalError:
            return False

    @contextlib.contextmanager
    def transaction(self):
        old_c = self._c
        try:
            with _exclusive_transaction(self._c) as new_c:
                self._c = new_c
                yield
                self._c.execute("DELETE FROM status")
                self._c.execute("INSERT INTO status SELECT * FROM new_status")
                self._c.execute("DELETE FROM new_status")
        finally:
            self._c = old_c

    def insert_ident_a(self, ident, a_props):
        # FIXME: Super inefficient
        old_props = self.get_new_a(ident)
        if old_props is not None:
            raise IdentAlreadyExists(old_href=old_props.href, new_href=a_props.href)
        b_props = self.get_new_b(ident) or ItemMetadata()
        self._c.execute(
            "INSERT OR REPLACE INTO new_status VALUES(?, ?, ?, ?, ?, ?, ?)",
            (
                ident,
                a_props.href,
                b_props.href,
                a_props.hash,
                b_props.hash,
                a_props.etag,
                b_props.etag,
            ),
        )

    def insert_ident_b(self, ident, b_props):
        # FIXME: Super inefficient
        old_props = self.get_new_b(ident)
        if old_props is not None:
            raise IdentAlreadyExists(old_href=old_props.href, new_href=b_props.href)
        a_props = self.get_new_a(ident) or ItemMetadata()
        self._c.execute(
            "INSERT OR REPLACE INTO new_status VALUES(?, ?, ?, ?, ?, ?, ?)",
            (
                ident,
                a_props.href,
                b_props.href,
                a_props.hash,
                b_props.hash,
                a_props.etag,
                b_props.etag,
            ),
        )

    def update_ident_a(self, ident, props):
        self._c.execute(
            "UPDATE new_status SET href_a=?, hash_a=?, etag_a=? WHERE ident=?",
            (props.href, props.hash, props.etag, ident),
        )
        assert self._c.rowcount > 0

    def update_ident_b(self, ident, props):
        self._c.execute(
            "UPDATE new_status SET href_b=?, hash_b=?, etag_b=?"
" WHERE ident=?", (props.href, props.hash, props.etag, ident), ) assert self._c.rowcount > 0 def remove_ident(self, ident): self._c.execute("DELETE FROM new_status WHERE ident=?", (ident,)) def _get_impl(self, ident, side, table): res = self._c.execute( "SELECT href_{side} AS href," " hash_{side} AS hash," " etag_{side} AS etag " "FROM {table} WHERE ident=?".format(side=side, table=table), (ident,), ).fetchone() if res is None: return None if res["hash"] is None: # FIXME: Implement as constraint in db assert res["href"] is None assert res["etag"] is None return None res = dict(res) return ItemMetadata(**res) def get_a(self, ident): return self._get_impl(ident, side="a", table="status") def get_b(self, ident): return self._get_impl(ident, side="b", table="status") def get_new_a(self, ident): return self._get_impl(ident, side="a", table="new_status") def get_new_b(self, ident): return self._get_impl(ident, side="b", table="new_status") def iter_old(self): return iter( res["ident"] for res in self._c.execute("SELECT ident FROM status").fetchall() ) def iter_new(self): return iter( res["ident"] for res in self._c.execute("SELECT ident FROM new_status").fetchall() ) def rollback(self, ident): a = self.get_a(ident) b = self.get_b(ident) assert (a is None) == (b is None) if a is None and b is None: self.remove_ident(ident) return self._c.execute( "INSERT OR REPLACE INTO new_status" " VALUES (?, ?, ?, ?, ?, ?, ?)", (ident, a.href, b.href, a.hash, b.hash, a.etag, b.etag), ) def _get_by_href_impl(self, href, default=(None, None), side=None): res = self._c.execute( "SELECT ident, hash_{side} AS hash, etag_{side} AS etag " "FROM status WHERE href_{side}=?".format(side=side), (href,), ).fetchone() if not res: return default return res["ident"], ItemMetadata( href=href, hash=res["hash"], etag=res["etag"], ) def get_by_href_a(self, *a, **kw): kw["side"] = "a" return self._get_by_href_impl(*a, **kw) def get_by_href_b(self, *a, **kw): kw["side"] = "b" return self._get_by_href_impl(*a, **kw) class SubStatus: def __init__(self, parent, side): self.parent = parent assert side in "ab" self.remove_ident = parent.remove_ident if side == "a": self.insert_ident = parent.insert_ident_a self.update_ident = parent.update_ident_a self.get = parent.get_a self.get_new = parent.get_new_a self.get_by_href = parent.get_by_href_a else: self.insert_ident = parent.insert_ident_b self.update_ident = parent.update_ident_b self.get = parent.get_b self.get_new = parent.get_new_b self.get_by_href = parent.get_by_href_b class ItemMetadata: href = None hash = None etag = None def __init__(self, **kwargs): for k, v in kwargs.items(): assert hasattr(self, k) setattr(self, k, v) def to_status(self): return {"href": self.href, "etag": self.etag, "hash": self.hash} vdirsyncer-0.18.0/vdirsyncer/utils.py000066400000000000000000000145301406140636100176760ustar00rootroot00000000000000import functools import os import sys import uuid from inspect import getfullargspec from . import exceptions # This is only a subset of the chars allowed per the spec. In particular `@` is # not included, because there are some servers that (incorrectly) encode it to # `%40` when it's part of a URL path, and reject or "repair" URLs that contain # `@` in the path. So it's better to just avoid it. 
SAFE_UID_CHARS = ( "abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789_.-+" ) _missing = object() def expand_path(p): p = os.path.expanduser(p) p = os.path.normpath(p) return p def split_dict(d, f): """Puts key into first dict if f(key), otherwise in second dict""" a, b = split_sequence(d.items(), lambda item: f(item[0])) return dict(a), dict(b) def split_sequence(s, f): """Puts item into first list if f(item), else in second list""" a = [] b = [] for item in s: if f(item): a.append(item) else: b.append(item) return a, b def uniq(s): """Filter duplicates while preserving order. ``set`` can almost always be used instead of this, but preserving order might prove useful for debugging.""" d = set() for x in s: if x not in d: d.add(x) yield x def get_etag_from_file(f): """Get etag from a filepath or file-like object. This function will flush/sync the file as much as necessary to obtain a correct value. """ if hasattr(f, "read"): f.flush() # Only this is necessary on Linux if sys.platform == "win32": os.fsync(f.fileno()) # Apparently necessary on Windows stat = os.fstat(f.fileno()) else: stat = os.stat(f) mtime = getattr(stat, "st_mtime_ns", None) if mtime is None: mtime = stat.st_mtime return f"{mtime:.9f};{stat.st_ino}" def get_storage_init_specs(cls, stop_at=object): if cls is stop_at: return () spec = getfullargspec(cls.__init__) traverse_superclass = getattr(cls.__init__, "_traverse_superclass", True) if traverse_superclass: if traverse_superclass is True: # noqa supercls = next( getattr(x.__init__, "__objclass__", x) for x in cls.__mro__[1:] ) else: supercls = traverse_superclass superspecs = get_storage_init_specs(supercls, stop_at=stop_at) else: superspecs = () return (spec,) + superspecs def get_storage_init_args(cls, stop_at=object): """ Get args which are taken during class initialization. Assumes that all classes' __init__ calls super().__init__ with the rest of the arguments. :param cls: The class to inspect. :returns: (all, required), where ``all`` is a set of all arguments the class can take, and ``required`` is the subset of arguments the class requires. """ all, required = set(), set() for spec in get_storage_init_specs(cls, stop_at=stop_at): all.update(spec.args[1:]) last = -len(spec.defaults) if spec.defaults else len(spec.args) required.update(spec.args[1:last]) return all, required def checkdir(path, create=False, mode=0o750): """ Check whether ``path`` is a directory. :param create: Whether to create the directory (and all parent directories) if it does not exist. :param mode: Mode to create missing directories with. """ if not os.path.isdir(path): if os.path.exists(path): raise OSError(f"{path} is not a directory.") if create: os.makedirs(path, mode) else: raise exceptions.CollectionNotFound( "Directory {} does not exist.".format(path) ) def checkfile(path, create=False): """ Check whether ``path`` is a file. :param create: Whether to create the file's parent directories if they do not exist. """ checkdir(os.path.dirname(path), create=create) if not os.path.isfile(path): if os.path.exists(path): raise OSError(f"{path} is not a file.") if create: with open(path, "wb"): pass else: raise exceptions.CollectionNotFound("File {} does not exist.".format(path)) class cached_property: """A read-only @property that is only evaluated once. Only usable on class instances' methods. 
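
    A minimal usage sketch (``Config`` is a made-up example class)::

        class Config:
            @cached_property
            def data(self):
                print("loading")
                return {"answer": 42}

        c = Config()
        c.data  # prints "loading", computes and caches the value
        c.data  # served from the instance __dict__; fget doesn't run again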
""" def __init__(self, fget, doc=None): self.__name__ = fget.__name__ self.__module__ = fget.__module__ self.__doc__ = doc or fget.__doc__ self.fget = fget def __get__(self, obj, cls): if obj is None: # pragma: no cover return self obj.__dict__[self.__name__] = result = self.fget(obj) return result def href_safe(ident, safe=SAFE_UID_CHARS): return not bool(set(ident) - set(safe)) def generate_href(ident=None, safe=SAFE_UID_CHARS): """ Generate a safe identifier, suitable for URLs, storage hrefs or UIDs. If the given ident string is safe, it will be returned, otherwise a random UUID. """ if not ident or not href_safe(ident, safe): return str(uuid.uuid4()) else: return ident def synchronized(lock=None): if lock is None: from threading import Lock lock = Lock() def inner(f): @functools.wraps(f) def wrapper(*args, **kwargs): with lock: return f(*args, **kwargs) return wrapper return inner def open_graphical_browser(url, new=0, autoraise=True): """Open a graphical web browser. This is basically like `webbrowser.open`, but without trying to launch CLI browsers at all. We're excluding those since it's undesirable to launch those when you're using vdirsyncer on a server. Rather copypaste the URL into the local browser, or use the URL-yanking features of your terminal emulator. """ import webbrowser cli_names = {"www-browser", "links", "links2", "elinks", "lynx", "w3m"} if webbrowser._tryorder is None: # Python 3.7 webbrowser.register_standard_browsers() for name in webbrowser._tryorder: if name in cli_names: continue browser = webbrowser.get(name) if browser.open(url, new, autoraise): return raise RuntimeError("No graphical browser found. Please open the URL " "manually.") vdirsyncer-0.18.0/vdirsyncer/vobject.py000066400000000000000000000263551406140636100202020ustar00rootroot00000000000000import hashlib from itertools import chain from itertools import tee from .utils import cached_property from .utils import uniq IGNORE_PROPS = ( # PRODID is changed by radicale for some reason after upload "PRODID", # Sometimes METHOD:PUBLISH is added by WebCAL providers, for us it doesn't # make a difference "METHOD", # X-RADICALE-NAME is used by radicale, because hrefs don't really exist in # their filesystem backend "X-RADICALE-NAME", # Apparently this is set by Horde? # https://github.com/pimutils/vdirsyncer/issues/318 "X-WR-CALNAME", # Those are from the VCARD specification and is supposed to change when the # item does -- however, we can determine that ourselves "REV", "LAST-MODIFIED", "CREATED", # Some iCalendar HTTP calendars generate the DTSTAMP at request time, so # this property always changes when the rest of the item didn't. Some do # the same with the UID. # # - Google's read-only calendar links # - http://www.feiertage-oesterreich.at/ "DTSTAMP", "UID", ) class Item: """Immutable wrapper class for VCALENDAR (VEVENT, VTODO) and VCARD""" def __init__(self, raw): assert isinstance(raw, str), type(raw) self._raw = raw def with_uid(self, new_uid): parsed = _Component.parse(self.raw) stack = [parsed] while stack: component = stack.pop() stack.extend(component.subcomponents) if component.name in ("VEVENT", "VTODO", "VJOURNAL", "VCARD"): del component["UID"] if new_uid: component["UID"] = new_uid return Item("\r\n".join(parsed.dump_lines())) @cached_property def raw(self): """Raw content of the item, as unicode string. Vdirsyncer doesn't validate the content in any way. 
""" return self._raw @cached_property def uid(self): """Global identifier of the item, across storages, doesn't change after a modification of the item.""" # Don't actually parse component, but treat all lines as single # component, avoiding traversal through all subcomponents. x = _Component("TEMP", self.raw.splitlines(), []) try: return x["UID"].strip() or None except KeyError: return None @cached_property def hash(self): """Hash of self.raw, used for etags.""" return hash_item(self.raw) @cached_property def ident(self): """Used for generating hrefs and matching up items during synchronization. This is either the UID or the hash of the item's content.""" # We hash the item instead of directly using its raw content, because # # 1. The raw content might be really large, e.g. when it's a contact # with a picture, which bloats the status file. # # 2. The status file would contain really sensitive information. return self.uid or self.hash @property def parsed(self): """Don't cache because the rv is mutable.""" try: return _Component.parse(self.raw) except Exception: return None def normalize_item(item, ignore_props=IGNORE_PROPS): """Create syntactically invalid mess that is equal for similar items.""" if not isinstance(item, Item): item = Item(item) item = _strip_timezones(item) x = _Component("TEMP", item.raw.splitlines(), []) for prop in IGNORE_PROPS: del x[prop] x.props.sort() return "\r\n".join(filter(bool, (line.strip() for line in x.props))) def _strip_timezones(item): parsed = item.parsed if not parsed or parsed.name != "VCALENDAR": return item parsed.subcomponents = [c for c in parsed.subcomponents if c.name != "VTIMEZONE"] return Item("\r\n".join(parsed.dump_lines())) def hash_item(text): return hashlib.sha256(normalize_item(text).encode("utf-8")).hexdigest() def split_collection(text): assert isinstance(text, str) inline = [] items = {} # uid => item ungrouped_items = [] for main in _Component.parse(text, multiple=True): _split_collection_impl(main, main, inline, items, ungrouped_items) for item in chain(items.values(), ungrouped_items): item.subcomponents.extend(inline) yield "\r\n".join(item.dump_lines()) def _split_collection_impl(item, main, inline, items, ungrouped_items): if item.name == "VTIMEZONE": inline.append(item) elif item.name == "VCARD": ungrouped_items.append(item) elif item.name in ("VTODO", "VEVENT", "VJOURNAL"): uid = item.get("UID", "") wrapper = _Component(main.name, main.props[:], []) if uid.strip(): wrapper = items.setdefault(uid, wrapper) else: ungrouped_items.append(wrapper) wrapper.subcomponents.append(item) elif item.name in ("VCALENDAR", "VADDRESSBOOK"): if item.name == "VCALENDAR": del item["METHOD"] for subitem in item.subcomponents: _split_collection_impl(subitem, item, inline, items, ungrouped_items) else: raise ValueError("Unknown component: {}".format(item.name)) _default_join_wrappers = { "VCALENDAR": "VCALENDAR", "VEVENT": "VCALENDAR", "VTODO": "VCALENDAR", "VCARD": "VADDRESSBOOK", } def join_collection(items, wrappers=_default_join_wrappers): """ :param wrappers: { item_type: wrapper_type } """ items1, items2 = tee((_Component.parse(x) for x in items), 2) item_type, wrapper_type = _get_item_type(items1, wrappers) wrapper_props = [] def _get_item_components(x): if x.name == wrapper_type: wrapper_props.extend(x.props) return x.subcomponents else: return [x] components = chain(*(_get_item_components(x) for x in items2)) lines = chain(*uniq(tuple(x.dump_lines()) for x in components)) if wrapper_type is not None: lines = chain( *( 
[f"BEGIN:{wrapper_type}"], # XXX: wrapper_props is a list of lines (with line-wrapping), so # filtering out duplicate lines will almost certainly break # multiline-values. Since the only props we usually need to # support are PRODID and VERSION, I don't care. uniq(wrapper_props), lines, [f"END:{wrapper_type}"], ) ) return "".join(line + "\r\n" for line in lines) def _get_item_type(components, wrappers): i = 0 for component in components: i += 1 try: item_type = component.name wrapper_type = wrappers[item_type] except KeyError: pass else: return item_type, wrapper_type if not i: return None, None else: raise ValueError("Not sure how to join components.") class _Component: """ Raw outline of the components. Vdirsyncer's operations on iCalendar and VCard objects are limited to retrieving the UID and splitting larger files into items. Consequently this parser is very lazy, with the downside that manipulation of item properties are extremely costly. Other features: - Preserve the original property order and wrapping. - Don't choke on irrelevant details like invalid datetime formats. Original version from https://github.com/collective/icalendar/, but apart from the similar API, very few parts have been reused. """ def __init__(self, name, lines, subcomponents): """ :param name: The component name. :param lines: The component's own properties, as list of lines (strings). :param subcomponents: List of components. """ self.name = name self.props = lines self.subcomponents = subcomponents @classmethod def parse(cls, lines, multiple=False): if isinstance(lines, bytes): lines = lines.decode("utf-8") if isinstance(lines, str): lines = lines.splitlines() stack = [] rv = [] try: for _i, line in enumerate(lines): if line.startswith("BEGIN:"): c_name = line[len("BEGIN:") :].strip().upper() stack.append(cls(c_name, [], [])) elif line.startswith("END:"): component = stack.pop() if stack: stack[-1].subcomponents.append(component) else: rv.append(component) else: if line.strip(): stack[-1].props.append(line) except IndexError: raise ValueError("Parsing error at line {}".format(_i + 1)) if multiple: return rv elif len(rv) != 1: raise ValueError("Found {} components, expected one.".format(len(rv))) else: return rv[0] def dump_lines(self): yield f"BEGIN:{self.name}" yield from self.props for c in self.subcomponents: yield from c.dump_lines() yield f"END:{self.name}" def __delitem__(self, key): prefix = (f"{key}:", f"{key};") new_lines = [] lineiter = iter(self.props) while True: for line in lineiter: if line.startswith(prefix): break else: new_lines.append(line) else: break for line in lineiter: if not line.startswith((" ", "\t")): new_lines.append(line) break self.props = new_lines def __setitem__(self, key, val): assert isinstance(val, str) assert "\n" not in val del self[key] line = f"{key}:{val}" self.props.append(line) def __contains__(self, obj): if isinstance(obj, type(self)): return obj not in self.subcomponents and not any( obj in x for x in self.subcomponents ) elif isinstance(obj, str): return self.get(obj, None) is not None else: raise ValueError(obj) def __getitem__(self, key): prefix_without_params = f"{key}:" prefix_with_params = f"{key};" iterlines = iter(self.props) for line in iterlines: if line.startswith(prefix_without_params): rv = line[len(prefix_without_params) :] break elif line.startswith(prefix_with_params): rv = line[len(prefix_with_params) :].split(":", 1)[-1] break else: raise KeyError() for line in iterlines: if line.startswith((" ", "\t")): rv += line[1:] else: break return rv def 
get(self, key, default=None): try: return self[key] except KeyError: return default def __eq__(self, other): return ( isinstance(other, type(self)) and self.name == other.name and self.props == other.props and self.subcomponents == other.subcomponents )
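

# A rough end-to-end sketch of how the helpers above fit together (the
# iCalendar text is made up for illustration):
#
#     raw = "\r\n".join([
#         "BEGIN:VCALENDAR",
#         "BEGIN:VEVENT",
#         "UID:event-1",
#         "END:VEVENT",
#         "END:VCALENDAR",
#     ])
#     (item_text,) = split_collection(raw)
#     assert Item(item_text).uid == "event-1"
#     assert join_collection([item_text]).startswith("BEGIN:VCALENDAR")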