onedrive-2.3.13/.github/ISSUE_TEMPLATE/bug_report.md

---
name: Bug report
about: Create a report to help us improve

---

**Note:** Before submitting a bug report, please ensure you are running the latest 'onedrive' client as built from 'master' and by using the latest available DMD compiler. Refer to the readme on building the client for your system.

### Bug Report Details ###

**Describe the bug**
A clear and concise description of what the bug is.

**Application and Operating System Details:**
* OS: Output of `uname -a` & provide your OS & version (CentOS 6.x, Ubuntu 18.x etc)
* Are you using a headless system (no gui) or with a gui installed?
* OneDrive Account Type
* DMD or LDC compiler version: `dmd --version` or `ldmd2 --version`
* Application configuration: Output of `onedrive --display-config`
* Curl Version: Output of `curl --version`

**Note:** Please generate a full debug log as per [https://github.com/abraunegg/onedrive/wiki/Generate-debug-log-for-support](https://github.com/abraunegg/onedrive/wiki/Generate-debug-log-for-support) and email to support@mynas.com.au

**To Reproduce**
Steps to reproduce the behavior if not causing an application crash:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

If the issue is replicated by a specific 'file' or 'path', please archive the file and path tree & email to support@mynas.com.au

**Complete Verbose Log Output**
A clear and full log of the problem when running the application in the following manner (ie, not in monitor mode):
```bash
onedrive --synchronize --verbose
```

Run the application in a separate terminal window or SSH session and provide the entire application output, including the error & crash. When posting the logs, please format the log output to make it easier to read. See [https://guides.github.com/features/mastering-markdown/](https://guides.github.com/features/mastering-markdown/) for more details.

Application Log Output:
```bash
Verbose console log output goes here
```

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Additional context**
Add any other context about the problem here.

### Bug Report Checklist ###
* [ ] Detailed description
* [ ] Reproduction steps (if applicable)
* [ ] Verbose Log Output
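
The details requested above can be gathered in one pass before filing the report. A minimal sketch, assuming the `onedrive` binary is on your `PATH`; the output file names are only examples, and you should use `ldmd2 --version` instead of `dmd --version` if you built with LDC:

```bash
# Hypothetical helper: collect the environment details this template asks for.
{
  echo "== uname ==";           uname -a
  echo "== compiler ==";        dmd --version 2>/dev/null || ldmd2 --version
  echo "== curl ==";            curl --version
  echo "== onedrive config =="; onedrive --display-config
} > bug-report-details.txt

# Full verbose log of a standalone sync (not monitor mode), as requested above.
onedrive --synchronize --verbose 2>&1 | tee onedrive-verbose.log
```
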

onedrive-2.3.13/.github/ISSUE_TEMPLATE/feature_request.md

---
name: Feature request
about: Suggest an idea for this project

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when ...

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.

onedrive-2.3.13/.github/lock.yml

# Configuration for lock-threads - https://github.com/dessant/lock-threads

# Number of days of inactivity before a closed issue or pull request is locked
daysUntilLock: 30

# Skip issues and pull requests created before a given timestamp. Timestamp must
# follow ISO 8601 (`YYYY-MM-DD`). Set to `false` to disable
skipCreatedBefore: false

# Issues and pull requests with these labels will not be locked. Set to `[]` to disable
exemptLabels: []

# Label to add before locking, such as `outdated`. Set to `false` to disable
lockLabel: false

# Comment to post before locking. Set to `false` to disable
lockComment: >
  This thread has been automatically locked since there has not been
  any recent activity after it was closed. Please open a new issue for
  related bugs.

# Assign `resolved` as the reason for locking. Set to `false` to disable
setLockReason: false

# Limit to only `issues` or `pulls`
# only: issues

# Optionally, specify configuration settings just for `issues` or `pulls`
# issues:
#   exemptLabels:
#     - help-wanted
#   lockLabel: outdated

# pulls:
#   daysUntilLock: 30

# Repository to extend settings from
# _extends: repo

onedrive-2.3.13/.gitignore

.*
onedrive
onedrive.1
onedrive.o
contrib/systemd/onedrive.service
contrib/systemd/onedrive@.service
version
Makefile
config.log
config.status
autom4te.cache/
contrib/pacman/PKGBUILD
contrib/spec/onedrive.spec

onedrive-2.3.13/.travis-ci.sh

#!/bin/bash
# Based on a test script from avsm/ocaml repo https://github.com/avsm/ocaml
# Adapted from https://www.tomaz.me/2013/12/02/running-travis-ci-tests-on-arm.html
# Adapted from https://github.com/PJK/libcbor/blob/master/.travis-qemu.sh
# Adapted from https://gist.github.com/oznu/b5efd7784e5a820ec3746820f2183dc0
# Adapted from https://blog.lazy-evaluation.net/posts/linux/debian-armhf-bootstrap.html
# Adapted from https://blog.lazy-evaluation.net/posts/linux/debian-stretch-arm64.html
set -e

# CHROOT Directory
CHROOT_DIR=/tmp/chroot

# Debian package dependencies for the host to run ARM under QEMU
DEBIAN_MIRROR="http://httpredir.debian.org/debian"
HOST_DEPENDENCIES="qemu-user-static binfmt-support debootstrap sbuild wget"

# Debian package dependencies for the chrooted environment
GUEST_DEPENDENCIES="build-essential libcurl4-openssl-dev libsqlite3-dev libgnutls-openssl27 git pkg-config libxml2"

# LDC Version
# Different versions due to https://github.com/ldc-developers/ldc/issues/3027
# LDC v1.16.0 re-introduces ARMHF and ARM64 version - https://github.com/ldc-developers/ldc/releases/tag/v1.16.0
LDC_VERSION_ARMHF=1.16.0
LDC_VERSION_ARM64=1.16.0

function setup_arm32_chroot {
    # Update apt repository details
    sudo apt-get update

    # 32Bit Variables
    VERSION=jessie
    CHROOT_ARCH=armhf

    # Host dependencies
    sudo apt-get install -qq -y ${HOST_DEPENDENCIES}

    # Download LDC compiler
    wget https://github.com/ldc-developers/ldc/releases/download/v${LDC_VERSION_ARMHF}/ldc2-${LDC_VERSION_ARMHF}-linux-armhf.tar.xz
    tar -xf ldc2-${LDC_VERSION_ARMHF}-linux-armhf.tar.xz
    mv ldc2-${LDC_VERSION_ARMHF}-linux-armhf dlang-${ARCH}
    rm -rf ldc2-${LDC_VERSION_ARMHF}-linux-armhf.tar.xz

    # Create chrooted environment
    sudo mkdir ${CHROOT_DIR}
    sudo debootstrap --foreign --no-check-gpg --variant=buildd --arch=${CHROOT_ARCH} ${VERSION} ${CHROOT_DIR} ${DEBIAN_MIRROR}
    sudo cp /usr/bin/qemu-arm-static ${CHROOT_DIR}/usr/bin/
    sudo chroot ${CHROOT_DIR} /debootstrap/debootstrap --second-stage
    sudo sbuild-createchroot --arch=${CHROOT_ARCH} --foreign --setup-only ${VERSION} ${CHROOT_DIR} ${DEBIAN_MIRROR}

    configure_chroot
}

function setup_arm64_chroot {
    # Update apt repository details
    sudo apt-get update

    # 64Bit Variables
    VERSION64=stretch
    CHROOT_ARCH64=arm64

    # Host dependencies
    sudo apt-get install -qq -y ${HOST_DEPENDENCIES}

    # Download LDC compiler
    wget https://github.com/ldc-developers/ldc/releases/download/v${LDC_VERSION_ARM64}/ldc2-${LDC_VERSION_ARM64}-linux-aarch64.tar.xz
    tar -xf ldc2-${LDC_VERSION_ARM64}-linux-aarch64.tar.xz
    mv ldc2-${LDC_VERSION_ARM64}-linux-aarch64 dlang-${ARCH}
    rm -rf ldc2-${LDC_VERSION_ARM64}-linux-aarch64.tar.xz

    # ARM64 qemu-debootstrap needs to be 1.0.78, Trusty is 1.0.59
    #sudo echo "deb http://archive.ubuntu.com/ubuntu xenial main restricted universe multiverse" >> /etc/apt/sources.list
    echo "deb http://archive.ubuntu.com/ubuntu xenial main restricted universe multiverse" | sudo tee -a /etc/apt/sources.list > /dev/null
    sudo apt-get update
    sudo apt-get install -t xenial debootstrap

    # Create chrooted environment
    sudo mkdir ${CHROOT_DIR}
    sudo qemu-debootstrap --arch=${CHROOT_ARCH64} ${VERSION64} ${CHROOT_DIR} ${DEBIAN_MIRROR}

    configure_chroot
}

function setup_x32_chroot {
    # Update apt repository details
    sudo apt-get update

    # 32Bit Variables
    VERSION=jessie
    CHROOT_ARCH32=i386

    # Host dependencies
    sudo apt-get install -qq -y ${HOST_DEPENDENCIES}

    # Download DMD compiler
    DMDVER=2.083.1
    wget http://downloads.dlang.org/releases/2.x/${DMDVER}/dmd.${DMDVER}.linux.tar.xz
    tar -xf dmd.${DMDVER}.linux.tar.xz
    mv dmd2 dlang-${ARCH}
    rm -rf dmd.${DMDVER}.linux.tar.xz

    # Create chrooted environment
    sudo mkdir ${CHROOT_DIR}
    sudo debootstrap --foreign --no-check-gpg --variant=buildd --arch=${CHROOT_ARCH32} ${VERSION} ${CHROOT_DIR} ${DEBIAN_MIRROR}
    sudo cp /usr/bin/qemu-i386-static ${CHROOT_DIR}/usr/bin/
    sudo cp /usr/bin/qemu-x86_64-static ${CHROOT_DIR}/usr/bin/
    sudo chroot ${CHROOT_DIR} /debootstrap/debootstrap --second-stage
    sudo sbuild-createchroot --arch=${CHROOT_ARCH32} --foreign --setup-only ${VERSION} ${CHROOT_DIR} ${DEBIAN_MIRROR}

    configure_chroot
}

function configure_chroot {
    # Create file with environment variables which will be used inside chrooted environment
    echo "export ARCH=${ARCH}" > envvars.sh
    echo "export TRAVIS_BUILD_DIR=${TRAVIS_BUILD_DIR}" >> envvars.sh
    chmod a+x envvars.sh

    # Install dependencies inside chroot
    sudo chroot ${CHROOT_DIR} apt-get update
    sudo chroot ${CHROOT_DIR} apt-get --allow-unauthenticated install -qq -y ${GUEST_DEPENDENCIES}

    # Create build dir and copy travis build files to our chroot environment
    sudo mkdir -p ${CHROOT_DIR}/${TRAVIS_BUILD_DIR}
    sudo rsync -a ${TRAVIS_BUILD_DIR}/ ${CHROOT_DIR}/${TRAVIS_BUILD_DIR}/

    # Indicate chroot environment has been set up
    sudo touch ${CHROOT_DIR}/.chroot_is_done

    # Call ourselves again which will cause tests to run
    sudo chroot ${CHROOT_DIR} bash -c "cd ${TRAVIS_BUILD_DIR} && chmod a+x ./.travis-ci.sh"
    sudo chroot ${CHROOT_DIR} bash -c "cd ${TRAVIS_BUILD_DIR} && ./.travis-ci.sh"
}

function build_onedrive {
    # Depending on architecture, build onedrive using applicable tool
    echo "$(uname -a)"
    HOMEDIR=$(pwd)
    if [ "${ARCH}" = "x64" ]; then
        # Build on x86_64 as normal
        ./configure
        make clean; make;
    else
        if [ "${ARCH}" = "x32" ]; then
            # 32Bit DMD Build
            ./configure DC=${HOMEDIR}/dlang-${ARCH}/linux/bin32/dmd
            make clean; make
        else
            # LDC Build - ARM32, ARM64
            ./configure DC=${HOMEDIR}/dlang-${ARCH}/bin/ldmd2
            make clean; make
        fi
    fi

    # Functional testing of built application
    test_onedrive
}

function test_onedrive {
    # Testing onedrive client - does the built application execute?
    ./onedrive --version

    # Functional testing on x64 only
    if [ "${ARCH}" = "x64" ]; then
        chmod a+x ./tests/makefiles.sh
        cd ./tests/
        ./makefiles.sh
        cd ..
        mkdir -p ~/.config/onedrive/
        echo $ODP > ~/.config/onedrive/refresh_token
        ./onedrive --synchronize --verbose --syncdir '~/OneDriveALT'
        # OneDrive Cleanup
        rm -rf ~/OneDriveALT/*
        ./onedrive --synchronize --verbose --syncdir '~/OneDriveALT'
    fi
}

if [ "${ARCH}" = "arm32" ] || [ "${ARCH}" = "arm64" ] || [ "${ARCH}" = "x32" ]; then
    if [ -e "/.chroot_is_done" ]; then
        # We are inside ARM chroot
        echo "Running inside chrooted QEMU ${ARCH} environment"
        . ./envvars.sh
        export PATH="$PATH:/usr/sbin:/sbin:/bin"
        build_onedrive
    else
        # Need to set up chrooted environment first
        echo "Setting up chrooted ${ARCH} build environment"
        if [ "${ARCH}" = "x32" ]; then
            # 32Bit i386 Environment
            setup_x32_chroot
        else
            if [ "${ARCH}" = "arm32" ]; then
                # 32Bit ARM Environment
                setup_arm32_chroot
            else
                # 64Bit ARM Environment
                setup_arm64_chroot
            fi
        fi
    fi
else
    # Proceed as normal
    echo "Running an x86_64 Build"
    build_onedrive
fi

onedrive-2.3.13/.travis.yml

# sudo access is required
sudo: required

# Compilation language
language: d

# Use latest DMD
d:
  - dmd

# What build architectures will we build on
env:
  - ARCH=x64
  - ARCH=x32
  - ARCH=arm32
  - ARCH=arm64

script:
  - "bash -ex .travis-ci.sh"
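
For reference, the same script can be exercised outside Travis by supplying the variables the CI normally provides. A rough sketch, assuming the host dependencies listed in the script are already installed; non-x64 targets additionally set up a chroot and therefore need sudo:

```bash
# ARCH and TRAVIS_BUILD_DIR are normally injected by Travis CI (see .travis.yml above).
export ARCH=x64                    # one of: x64, x32, arm32, arm64
export TRAVIS_BUILD_DIR="$(pwd)"   # the checked-out repository root
bash -ex .travis-ci.sh             # same entry point .travis.yml invokes
```
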

onedrive-2.3.13/CHANGELOG.md

# Changelog
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).

## 2.3.13 - 2019-12-31
### Fixed
* Change the sync list override flag to false as default when not using sync_list
* Fix --dry-run output when using --upload-only & --no-remote-delete and deleting local files

### Added
* Add a verbose log entry when a monitor sync loop with OneDrive starts & completes

### Changed
* Remove logAndNotify for 'processing X changes' as it is excessive for each change bundle to inform the desktop of the number of changes the client is processing

### Updated
* Updated INSTALL.md with Ubuntu 16.x i386 build instructions to reflect working configuration on legacy hardware
* Updated INSTALL.md with details of Linux packages
* Updated INSTALL.md build instructions for CentOS platforms
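
(For context on the sync_list entries above: `sync_list` is a plain-text file, conventionally placed alongside the client config as `~/.config/onedrive/sync_list`, with one path per line relative to `sync_dir`; paths not listed are excluded from syncing. A purely illustrative example:)

```
Documents
Music/Albums
Projects/onedrive
```
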

## 2.3.12 - 2019-12-04
### Fixed
* Retry session upload fragment when transient errors occur to prevent silent upload failure
* Update Microsoft restriction and limitations about windows naming files to include '~' for folder names
* Docker guide fixes, add multiple account setup instructions
* Check database for excluded sync_list items previously in scope
* Catch DNS resolution error
* Fix where an item now out of scope should be flagged for local delete
* Fix rebuilding of onedrive, but ensure version is properly updated
* Update Ubuntu i386 build instructions to use DMD using preferred method

### Added
* Add debug message to when a message is sent to dbus or notification daemon
* Add i386 instructions for legacy low memory platforms using LDC

## 2.3.11 - 2019-11-05
### Fixed
* Fix typo in the documentation regarding invalid config when upgrading from 'skilion' codebase
* Fix handling of skip_dir, skip_file & sync_list config options
* Fix typo in the documentation regarding sync_list
* Fix log output to be consistent with sync_list exclusion
* Fix 'Processing X changes' output to be more reflective of actual activity when using sync_list
* Remove unused and unexported SED variable in Makefile.in
* Handle curl exceptions and timeouts better with backoff/retry logic
* Update skip_dir pattern matching when using wildcards
* Fix when a full rescan is performed when using sync_list
* Fix 'Key not found: name' when computing skip_dir path
* Fix call from --monitor to observe --no-remote-delete
* Fix unhandled exception when monitor initialisation failure occurs due to too many open local files
* Fix unhandled 412 error response from OneDrive API when moving files right after upload
* Fix --monitor when used with --download-only. This fixes a regression introduced in 12947d1.
* Fix if --single-directory is being used, and we are using --monitor, only set inotify watches on the single directory

### Changed
* Move JSON logging output from error messages to debug output

## 2.3.10 - 2019-10-01
### Fixed
* Fix searching for 'name' when deleting a synced item, if the OneDrive API does not return the expected details in the API call
* Fix abnormal termination when no Internet connection
* Fix downloading of files from OneDrive Personal Shared Folders when the OneDrive API responds with unexpected additional path data
* Fix logging of 'initialisation' of client to actually when the attempt to initialise is performed
* Fix when using a sync_list file, using deltaLink will actually 'miss' changes (moves & deletes) on OneDrive as using sync_list discards changes
* Fix OneDrive API status code 500 handling when uploading files as error message is not correct
* Fix crash when resume_upload file is not a valid JSON
* Fix crash when a file system exception is generated when attempting to update the file date & time and this fails

### Added
* If there is a case-insensitive match error, also return the remote name from the response
* Make user-agent string a configuration option & add to config file
* Set default User-Agent to 'OneDrive Client for Linux v{version}'

### Changed
* Make verbose logging output optional on Docker
* Enable --resync & debug client output via environment variables on Docker

## 2.3.9 - 2019-09-01
### Fixed
* Catch a 403 Forbidden exception when querying Sharepoint Library Names
* Fix unhandled error exceptions that cause application to exit / crash when uploading files
* Fix JSON object validation for queries made against OneDrive where a JSON response is expected and where that response is to be used and expected to be valid
* Fix handling of 5xx responses from OneDrive when uploading via a session

### Added
* Detect the need for --resync when config changes either via config file or cli override

### Changed
* Change minimum required version of LDC to v1.12.0

### Removed
* Remove redundant logging output due to change in how errors are reported from OneDrive

## 2.3.8 - 2019-08-04
### Fixed
* Fix unable to download all files when OneDrive fails to return file level details used to validate file integrity
* Included the flag "-m" to create the home directory when creating the user
* Fix entrypoint.sh to work with "sudo docker run"
* Fix docker build error on stretch
* Fix hidden directories in 'root' from having prefix removed
* Fix Sharepoint Document Library handling for .txt & .csv files
* Fix logging for init.d service
* Fix OneDrive response missing required 'id' element when uploading images
* Fix 'Unexpected character '<'. (Line 1:1)' when OneDrive has an exception error
* Fix error when creating the sync dir fails when there is no permission to create the sync dir

### Added
* Add explicit check for hashes to be returned in cases where OneDrive API fails to provide them despite requested to do so
* Add comparison with sha1 if OneDrive provides that rather than quickXor
* Add selinux configuration details for a sync folder outside of the home folder
* Add date tag on docker.hub
* Add back CentOS 6 install & uninstall to Makefile
* Add a check to handle moving items out of sync_list sync scope & delete locally if true
* Implement --get-file-link which will return the weburl of a file which has been synced to OneDrive

### Changed
* Change unauthorized-api exit code to 3
* Update LDC to v1.16.0 for Travis CI testing
* Use replace function for modified Sharepoint Document Library files rather than delete and upload as new file, preserving file history
* Update Sharepoint modified file handling for files > 4Mb in size

### Removed
* Remove -d shorthand for --download-only to avoid confusion with other GNU applications where -d stands for 'debug'

## 2.3.7 - 2019-07-03
### Fixed
* Fix not all files being downloaded due to OneDrive query failure
* False DB update which potentially could have led to false data loss on OneDrive

## 2.3.6 - 2019-07-03 (DO NOT USE)
### Fixed
* Fix JSONValue object validation
* Fix building without git being available
* Fix some spelling/grammatical errors
* Fix OneDrive error response on creating upload session

### Added
* Add download size & hash check to ensure downloaded files are valid and not corrupt
* Added --force-http-2 to use HTTP/2 if desired

### Changed
* Deprecated --force-http-1.1 (enabled by default) due to OneDrive inconsistent behavior with HTTP/2 protocol

## 2.3.5 - 2019-06-19
### Fixed
* Handle a directory in the sync_dir when no permission to access
* Get rid of forced root necessity during installation
* Fix broken autoconf code for --enable-XXX options
* Fix so that skip_size check should only be used if configured
* Fix a OneDrive Internal Error exception occurring before attempting to download a file

### Added
* Check for supported version of D compiler

## 2.3.4 - 2019-06-13
### Fixed
* Fix 'Local files not deleted' when using bad 'skip_file' entry
* Fix --dry-run logging output for faking downloading new files
* Fix install unit files to correct location on RHEL/CentOS 7
* Fix up unit file removal on all platforms
* Fix setting times on a file by adding a check to see if the file was actually downloaded before attempting to set the times on the file
* Fix an unhandled curl exception when OneDrive throws an internal timeout error
* Check timestamp to ensure that latest timestamp is used when comparing OneDrive changes
* Fix handling responses where cTag JSON elements are missing
* Fix Docker entrypoint.sh failures when GID is defined but not UID

### Added
* Add autoconf based build system
* Add an encoding validation check before any path length checks are performed as if the path contains any invalid UTF-8 sequences
* Implement --sync-root-files to sync all files in the OneDrive root when using a sync_list file that would normally exclude these files from being synced
* Implement skip_size feature request
* Implement feature request to support file based OneDrive authorization (request | response)

### Updated
* Better handle initialisation issues when OneDrive / MS Graph is experiencing problems that generate 401 & 5xx error codes
* Enhance error message when unable to connect to Microsoft OneDrive service when the local CA SSL certificate(s) have issues
* Update Dockerfile to correctly build on Docker Hub
* Rework directory layout and re-factor MD files for readability

## 2.3.3 - 2019-04-16
### Fixed
* Fix --upload-only check for Sharepoint uploads
* Fix check to ensure item root we flag as 'root' actually is OneDrive account 'root'
* Handle object error response from OneDrive when uploading to OneDrive Business
* Fix handling of some OneDrive accounts not providing 'quota' details
* Fix 'resume_upload' handling in the event of bad OneDrive response

### Added
* Add debugging for --get-O365-drive-id function
* Add shell (bash,zsh) completion support
* Add config options for command line switches to allow for better config handling in docker containers

### Updated
* Implement more meaningful 5xx error responses
* Update onedrive.logrotate indentations and comments
* Update 'min_notif_changes' to 'min_notify_changes'

## 2.3.2 - 2019-04-02
### Fixed
* Reduce scanning the entire local system in monitor mode for local changes
* Resolve file creation loop when working directly in the synced folder and Microsoft Sharepoint

### Added
* Add 'monitor_fullscan_frequency' config option to set the frequency of performing a full disk scan when in monitor mode

### Updated
* Update default 'skip_file' to include tmp and lock files generated by LibreOffice
* Update database version due to changing defaults of 'skip_file' which will force a rebuild and use of new skip_file default regex

## 2.3.1 - 2019-03-26
### Fixed
* Resolve 'make install' issue where rebuild of application would occur due to 'version' being flagged as .PHONY
* Update readme build instructions to include 'make clean;' before build to ensure that 'version' is cleanly removed and can be updated correctly
* Update Debian Travis CI build URLs

## 2.3.0 - 2019-03-25
### Fixed
* Resolve application crash if no 'size' value is returned when uploading a new file
* Resolve application crash if a 5xx error is returned when uploading a new file
* Resolve not 'refreshing' version file when rebuilding
* Resolve unexpected application processing by preventing use of --synchronize & --monitor together
* Resolve high CPU usage when performing DB reads
* Update error logging around directory case-insensitive match
* Update Travis CI and ARM dependencies for LDC 1.14.0
* Update Makefile due to build failure if building from release archive file
* Update logging as to why a OneDrive object was skipped

### Added
* Implement config option 'skip_dir'
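
(For context on the option added above: `skip_dir`, like the other `config` file options referenced throughout this changelog, lives in the client's config file, conventionally `~/.config/onedrive/config`. A purely illustrative fragment; the values are examples, not defaults:)

```
sync_dir = "~/OneDrive"
skip_dir = "Temp|Cache"
skip_file = "~*|.~*|*.tmp"
monitor_fullscan_frequency = "10"
min_notify_changes = "5"
```
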

## 2.2.6 - 2019-03-12
### Fixed
* Resolve application crash when unable to delete remote folders when business retention policies are enabled
* Resolve deprecation warning: loop index implicitly converted from size_t to int
* Resolve warnings regarding 'bashisms'
* Resolve handling of notification failure if dbus server has not started or is not available
* Resolve handling of response JSON to ensure that 'id' key element is always checked for
* Resolve excessive & needless logging in monitor mode
* Resolve compiling with LDC on Alpine as musl lacks some standard interfaces
* Resolve notification issues when offline and cannot act on changes
* Resolve Docker entrypoint.sh to accept command line arguments
* Resolve to create a new upload session on reinit
* Resolve where on OneDrive query failure, default root and drive id is used if a response is not returned
* Resolve Key not found: nextExpectedRanges when attempting session uploads and incorrect response is returned
* Resolve application crash when re-using an authentication URI twice after previous --logout
* Resolve creating a folder on a shared personal folder appears successful but returns a JSON error
* Resolve to treat mv of new file as upload of mv target
* Update Debian i386 build dependencies
* Update handling of --get-O365-drive-id to print out all 'site names' that match the explicit search entry rather than just the last match
* Update Docker readme & documentation
* Update handling of validating local file permissions for new file uploads

### Added
* Add support for install & uninstall on RHEL / CentOS 6.x
* Add support for when notifications are enabled, display the number of OneDrive changes to process if any are found
* Add 'config' option 'min_notif_changes' for minimum number of changes to notify on, default = 5
* Add additional Docker container builds utilising a smaller OS footprint
* Add configurable interval of logging in monitor mode
* Implement new CLI option --skip-dot-files to skip .files and .folders if option is used
* Implement new CLI option --check-for-nosync to ignore folder when special file (.nosync) present
* Implement new CLI option --dry-run

## 2.2.5 - 2019-01-16
### Fixed
* Update handling of HTTP 412 - Precondition Failed errors
* Update --display-config to display sync_list if configured
* Add a check for 'id' key on metadata update to prevent 'std.json.JSONException@std/json.d(494): Key not found: id'
* Update handling of 'remote' folder designation as 'root' items
* Ensure that remote deletes are handled correctly
* Handle 'Item not found' exception when unable to query OneDrive 'root' for changes
* Add handling for JSON response error when OneDrive API returns a 404 due to OneDrive API regression
* Fix items highlighted by codacy review

### Added
* Add --force-http-1.1 flag to downgrade any HTTP/2 curl operations to HTTP 1.1 protocol
* Support building with ldc2 and usage of pkg-config for lib finding

## 2.2.4 - 2018-12-28
### Fixed
* Resolve JSONException when supplying --get-O365-drive-id option with a string containing spaces
* Resolve 'sync_dir' not read from 'config' file when run in Docker container
* Resolve logic where potentially a 'default' ~/OneDrive sync_dir could be set despite 'config' file configured for an alternate
* Make sure sqlite checkpointing works by properly finalizing statements
* Update logic handling of --single-directory to prevent inadvertent local data loss
* Resolve signal handling and database shutdown on SIGINT and SIGTERM
* Update man page
* Implement better help output formatting

### Added
* Add debug handling for sync_dir operations
* Add debug handling for homePath calculation
* Add debug handling for configDirBase calculation
* Add debug handling if syncDir is created
* Implement Feature Request: Add status command or switch

## 2.2.3 - 2018-12-20
### Fixed
* Fix syncdir option is ignored

## 2.2.2 - 2018-12-20
### Fixed
* Handle short lived files in monitor mode
* Provide better log messages, less noise on temporary timeouts
* Deal with items that disappear during upload
* Deal with deleted move targets
* Reinitialize sync engine after three failed attempts
* Fix activation of dmd for docker builds
* Fix to check displayName rather than description for --get-O365-drive-id
* Fix checking of config file keys for validity
* Fix exception handling when missing parameter from usage option

### Added
* Notification support via libnotify
* Add very verbose (debug) mode by double -v -v
* Implement option --display-config

## 2.2.1 - 2018-12-04
### Fixed
* Gracefully handle connection errors in monitor mode
* Fix renaming of files when syncing
* Installation of doc files, addition of man page
* Adjust timeout values for libcurl
* Continue in monitor mode when sync timed out
* Fix unreachable statements
* Update Makefile to better support packaging
* Allow starting offline in monitor mode

### Added
* Implement --get-O365-drive-id to get correct SharePoint Shared Library (#248)
* Docker buildfiles for onedrive service (#262)

## 2.2.0 - 2018-11-24
### Fixed
* Updated client to output additional logging when debugging
* Resolve database assertion failure due to authentication
* Resolve unable to create folders on shared OneDrive Personal accounts

### Added
* Implement feature request to Sync from Microsoft SharePoint
* Implement feature request to specify a logging directory if logging is enabled

### Changed
* Change '--download' to '--download-only' to align with '--upload-only'
* Change logging so that logging to a separate file is no longer the default

## 2.1.6 - 2018-11-15
### Fixed
* Updated HTTP/2 transport handling when using curl 7.62.0 for session uploads

### Added
* Added PKGBUILD for makepkg for building packages under Arch Linux

## 2.1.5 - 2018-11-11
### Fixed
* Resolve 'Key not found: path' when syncing from some shared folders due to OneDrive API change
* Resolve to only upload changes on remote folder if the item is in the database - don't assert if false
* Resolve files will not download or upload when using curl 7.62.0 due to HTTP/2 being set as default for all curl operations
* Resolve to handle HTTP request returned status code 412 (Precondition Failed) for session uploads to OneDrive Personal Accounts
* Resolve unable to remove '~/.config/onedrive/resume_upload: No such file or directory' if there is a session upload error and the resume file does not get created
* Resolve handling of response codes when using 2 different systems when using '--upload-only' but the same OneDrive account and uploading the same filename to the same location

### Updated
* Updated Travis CI building on LDC v1.11.0 for ARMHF builds
* Updated Makefile to use 'install -D -m 644' rather than 'cp -raf'
* Updated default config to be aligned to code defaults

## 2.1.4 - 2018-10-10
### Fixed
* Resolve syncing of OneDrive Personal Shared Folders due to OneDrive API change
* Resolve incorrect systemd installation location(s) in Makefile

## 2.1.3 - 2018-10-04
### Fixed
* Resolve File download fails if the file is marked as malware in OneDrive
* Resolve high CPU usage when running in monitor mode
* Resolve how default path is set when running under systemd on headless systems
* Resolve incorrectly nested configDir in X11 systems
* Resolve Key not found: driveType
* Resolve to validate filename length before download to conform with Linux FS limits
* Resolve file handling to look for HTML ASCII codes which will cause uploads to fail
* Resolve Key not found: expirationDateTime on session resume

### Added
* Update Travis CI building to test build on ARM64

## 2.1.2 - 2018-08-27
### Fixed
* Resolve skipping of symlinks in monitor mode
* Resolve Gateway Timeout - JSONValue is not an object
* Resolve systemd/user is not supported on CentOS / RHEL
* Resolve HTTP request returned status code 429 (Too Many Requests)
* Resolve handling of maximum path length calculation
* Resolve 'The parent item is not in the local database'
* Resolve Correctly handle file case sensitivity issues in same folder
* Update unit files documentation link

## 2.1.1 - 2018-08-14
### Fixed
* Fix handling no remote delete of remote directories when using --no-remote-delete
* Fix handling of no permission to access a local file / corrupt local file
* Fix application crash when unable to access login.microsoft.com upon application startup

### Added
* Build instructions for openSUSE Leap 15.0

## 2.1.0 - 2018-08-10
### Fixed
* Fix handling of database exit scenarios when there is zero disk space left on drive where the items database resides
* Fix handling of incorrect database permissions
* Fix handling of different database versions to automatically re-create tables if version mis-match
* Fix handling timeout when accessing the Microsoft OneDrive Service
* Fix localFileModifiedTime to not use fraction seconds

### Added
* Implement Feature: Add a progress bar for large uploads & downloads
* Implement Feature: Make checkinterval for monitor configurable
* Implement Feature: Upload Only Option that does not perform remote delete
* Implement Feature: Add ability to skip symlinks
* Add dependency, ebuild and build instructions for Gentoo distributions

### Changed
* Build instructions for x86, x86_64 and ARM32 platforms
* Travis CI files to automate building on x32, x64 and ARM32 architectures
* Travis CI files to test built application against valid, invalid and problem files from previous issues

## 2.0.2 - 2018-07-18
### Fixed
* Fix systemd service install for builds with DESTDIR defined
* Fix 'HTTP 412 - Precondition Failed' error handling
* Gracefully handle OneDrive account password change
* Update logic handling of --upload-only and --local-first

## 2.0.1 - 2018-07-11
### Fixed
* Resolve computeQuickXorHash generates a different hash when files are > 64Kb

## 2.0.0 - 2018-07-10
### Fixed
* Resolve conflict resolution issue during syncing - the client does not handle conflicts very well & keeps on adding the hostname to files
* Resolve skilion #356 by adding additional check for 409 response from OneDrive
* Resolve multiple versions of file shown on website after single upload
* Resolve to gracefully fail when 'onedrive' process cannot get exclusive database lock
* Resolve 'Key not found: fileSystemInfo' when the item is a remote item (OneDrive Personal)
* Resolve skip_file config entry needs to be checked for any characters to escape
* Resolve Microsoft Naming Convention not being followed correctly
* Resolve Error when trying to upload a file with weird non-printable characters present
* Resolve Crash if file is locked by online editing (status code 423)
* Resolve compilation issue with dmd-2.081.0
* Resolve skip_file configuration doesn't handle spaces or specified directory paths

### Added
* Implement Feature: Add a flag to detect when the sync-folder is missing
* Implement Travis CI for code testing

### Changed
* Update Makefile to use DESTDIR variables
* Update OneDrive Business maximum path length from 256 to 400
* Update OneDrive Business allowed characters for files and folders
* Update sync_dir handling to use the absolute path for setting parameter to something other than ~/OneDrive via config file or command line
* Update Fedora build instructions

## 1.1.2 - 2018-05-17
### Fixed
* Fix 4xx errors including (412 pre-condition, 409 conflict)
* Fix Key not found: lastModifiedDateTime (OneDrive API change)
* Fix configuration directory not found when run via init.d
* Fix skilion Issues #73, #121, #132, #224, #257, #294, #295, #297, #298, #300, #306, #315, #320, #329, #334, #337, #341

### Added
* Add logging - log client activities to a file (/var/log/onedrive/%username%.onedrive.log or ~/onedrive.log)
* Add https debugging as a flag
* Add `--synchronize` to prevent syncing when just blindly running the application
* Add individual folder sync
* Add sync from local directory first rather than download first then upload
* Add upload long path check
* Add upload only
* Add check for max upload file size before attempting upload
* Add systemd unit files for single & multi user configuration
* Add init.d file for older init.d based services
* Add Microsoft naming conventions and namespace validation for items that will be uploaded
* Add remaining free space counter at client initialisation to avoid out of space upload issue
* Add large file upload size check to align to OneDrive file size limitations
* Add upload file size validation & retry if does not match
* Add graceful handling of some fatal errors (OneDrive 5xx error handling)

## Unreleased - 2018-02-19
### Fixed
* Crash when the delta link is expired

### Changed
* Disabled buffering on stdout

## 1.1.1 - 2018-01-20
### Fixed
* Wrong regex for parsing authentication uri

## 1.1.0 - 2018-01-19
### Added
* Support for shared folders (OneDrive Personal only)
* `--download` option to only download changes
* `DC` variable in Makefile to choose the compiler

### Changed
* Print logs on stdout instead of stderr
* Improve log messages

## 1.0.1 - 2017-08-01
### Added
* `--syncdir` option

### Changed
* `--version` output simplified
* Updated README

### Fixed
* Fix crash caused by remotely deleted and recreated directories

## 1.0.0 - 2017-07-14
### Added
* `--version` option

onedrive-2.3.13/LICENSE

GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007

Copyright (C) 2007 Free Software Foundation, Inc.
Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.

Preamble

The GNU General Public License is a free, copyleft license for software and other kinds of works.

The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too.

When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things.

To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others.

For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights.
Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. 
Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 
No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. 
You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. 
"Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. 
Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. 
If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. 
The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. 
It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see . Also add information on how to contact you by electronic and paper mail. If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: Copyright (C) This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box". You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see . The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read . onedrive-2.3.13/Makefile.in000066400000000000000000000111271360252424000154600ustar00rootroot00000000000000package = @PACKAGE_NAME@ version = @PACKAGE_VERSION@ prefix = @prefix@ # we don't use @exec_prefix@ because it usually contains '${prefix}' literally # but we use @prefix@/bin/onedrive in the systemd unit files which are generated # from the configure script. # Thus, set exec_prefix unconditionally to prefix # Alternative approach would be add dep on sed, and do manual generation in the Makefile. # exec_prefix = @exec_prefix@ exec_prefix = @prefix@ datarootdir = @datarootdir@ datadir = @datadir@ srcdir = @srcdir@ bindir = @bindir@ mandir = @mandir@ sysconfdir = @sysconfdir@ docdir = $(datadir)/doc/$(package) VPATH = @srcdir@ INSTALL = @INSTALL@ NOTIFICATIONS = @NOTIFICATIONS@ HAVE_SYSTEMD = @HAVE_SYSTEMD@ systemduserunitdir = @systemduserunitdir@ systemdsystemunitdir = @systemdsystemunitdir@ curl_LIBS = @curl_LIBS@ sqlite_LIBS = @sqlite_LIBS@ notify_LIBS = @notify_LIBS@ COMPLETIONS = @COMPLETIONS@ BASH_COMPLETION_DIR = @BASH_COMPLETION_DIR@ ZSH_COMPLETION_DIR = @ZSH_COMPLETION_DIR@ DEBUG = @DEBUG@ DC = @DC@ DC_TYPE = @DC_TYPE@ DCFLAGS = @DCFLAGS@ DCFLAGS += -w -g -O -J. 
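# The conditional blocks below adapt DCFLAGS to the compiler selected by configure:
# DMD and LDC spell the same switches differently (-debug vs -d-debug for debug
# builds, and LDC needs a -d prefix on -version=... identifiers), so the DEBUG and
# NOTIFICATIONS toggles expand to compiler-specific flag additions.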
ifeq ($(DEBUG),yes) ifeq ($(DC_TYPE),dmd) DCFLAGS += -debug -gs else DCFLAGS += -d-debug -gc endif endif ifeq ($(NOTIFICATIONS),yes) NOTIF_VERSIONS=-version=NoPragma -version=NoGdk -version=Notifications # support ldc2 which needs -d prefix for version specification ifeq ($(DC_TYPE),ldc) NOTIF_VERSIONS := $(addprefix -d,$(NOTIF_VERSIONS)) endif DCFLAGS += $(NOTIF_VERSIONS) endif system_unit_files = contrib/systemd/onedrive@.service user_unit_files = contrib/systemd/onedrive.service DOCFILES = README.md config LICENSE CHANGELOG.md docs/Docker.md docs/INSTALL.md docs/Office365.md docs/USAGE.md ifneq ("$(wildcard /etc/redhat-release)","") RHEL = $(shell cat /etc/redhat-release | grep -E "(Red Hat Enterprise Linux Server|CentOS)" | wc -l) RHEL_VERSION = $(shell rpm --eval "%{rhel}") else RHEL = 0 RHEL_VERSION = 0 endif SOURCES = \ src/config.d \ src/itemdb.d \ src/log.d \ src/main.d \ src/monitor.d \ src/onedrive.d \ src/qxor.d \ src/selective.d \ src/sqlite.d \ src/sync.d \ src/upload.d \ src/util.d \ src/progress.d ifeq ($(NOTIFICATIONS),yes) SOURCES += src/notifications/notify.d src/notifications/dnotify.d endif all: onedrive clean: rm -f onedrive onedrive.o version rm -rf autom4te.cache rm -f config.log config.status # also remove files generated via ./configure distclean: clean rm -f Makefile contrib/pacman/PKGBUILD contrib/spec/onedrive.spec onedrive.1 \ $(system_unit_files) $(user_unit_files) onedrive: $(SOURCES) if [ -f .git/HEAD ] ; then \ git describe --tags > version ; \ else \ echo $(version) > version ; \ fi $(DC) $(DCFLAGS) $(addprefix -L,$(curl_LIBS)) $(addprefix -L,$(sqlite_LIBS)) $(addprefix -L,$(notify_LIBS)) -L-ldl $(SOURCES) -of$@ install: all $(INSTALL) -D onedrive $(DESTDIR)$(bindir)/onedrive $(INSTALL) -D onedrive.1 $(DESTDIR)$(mandir)/man1/onedrive.1 $(INSTALL) -D -m 644 contrib/logrotate/onedrive.logrotate $(DESTDIR)$(sysconfdir)/logrotate.d/onedrive mkdir -p $(DESTDIR)$(docdir) $(INSTALL) -D -m 644 $(DOCFILES) $(DESTDIR)$(docdir) ifeq ($(HAVE_SYSTEMD),yes) $(INSTALL) -d -m 0755 $(DESTDIR)$(systemduserunitdir) $(DESTDIR)$(systemdsystemunitdir) ifeq ($(RHEL),1) $(INSTALL) -m 0644 $(system_unit_files) $(DESTDIR)$(systemdsystemunitdir) $(INSTALL) -m 0644 $(user_unit_files) $(DESTDIR)$(systemdsystemunitdir) else $(INSTALL) -m 0644 $(system_unit_files) $(DESTDIR)$(systemdsystemunitdir) $(INSTALL) -m 0644 $(user_unit_files) $(DESTDIR)$(systemduserunitdir) endif else ifeq ($(RHEL_VERSION),6) install -D contrib/init.d/onedrive.init $(DESTDIR)/etc/init.d/onedrive install -D contrib/init.d/onedrive_service.sh $(DESTDIR)$(bindir)/onedrive_service.sh endif endif ifeq ($(COMPLETIONS),yes) $(INSTALL) -D -m 644 contrib/completions/complete.zsh $(DESTDIR)$(ZSH_COMPLETION_DIR)/_onedrive $(INSTALL) -D -m 644 contrib/completions/complete.bash $(DESTDIR)$(BASH_COMPLETION_DIR)/onedrive endif uninstall: rm -f $(DESTDIR)$(bindir)/onedrive rm -f $(DESTDIR)$(mandir)/man1/onedrive.1 rm -f $(DESTDIR)$(sysconfdir)/logrotate.d/onedrive ifeq ($(HAVE_SYSTEMD),yes) ifeq ($(RHEL),1) rm -f $(DESTDIR)$(systemdsystemunitdir)/onedrive*.service else rm -f $(DESTDIR)$(systemdsystemunitdir)/onedrive*.service rm -f $(DESTDIR)$(systemduserunitdir)/onedrive*.service endif else ifeq ($(RHEL_VERSION),6) rm -f $(DESTDIR)/etc/init.d/onedrive rm -f $(DESTDIR)$(bindir)/onedrive_service.sh endif endif for i in $(DOCFILES) ; do rm -f $(DESTDIR)$(docdir)/$$i ; done ifeq ($(COMPLETIONS),yes) rm -f $(DESTDIR)$(ZSH_COMPLETION_DIR)/_onedrive rm -f $(DESTDIR)$(BASH_COMPLETION_DIR)/onedrive endif 
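# A hedged sketch of how this Makefile is normally driven (illustrative, not a
# project-mandated recipe): the generated configure script fills in the @...@
# substitutions above; the two --enable options shown are declared by configure,
# and choosing them here is an assumption made only for the example:
#
#   ./configure --enable-notifications --enable-completions
#   make
#   sudo make install
#
# The install and uninstall rules honour DESTDIR, so packagers can stage files with
# e.g. make install DESTDIR=/tmp/pkgroot (a hypothetical path) instead of installing
# directly as root.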
onedrive-2.3.13/README.md000066400000000000000000000045431360252424000146760ustar00rootroot00000000000000# OneDrive Free Client [![Version](https://img.shields.io/github/v/release/abraunegg/onedrive)](https://github.com/abraunegg/onedrive/releases) [![Release Date](https://img.shields.io/github/release-date/abraunegg/onedrive)](https://github.com/abraunegg/onedrive/releases) [![Travis CI](https://img.shields.io/travis/com/abraunegg/onedrive)](https://travis-ci.com/abraunegg/onedrive/builds) [![Docker Build](https://img.shields.io/docker/automated/driveone/onedrive)](https://hub.docker.com/r/driveone/onedrive) [![Docker Pulls](https://img.shields.io/docker/pulls/driveone/onedrive)](https://hub.docker.com/r/driveone/onedrive) A complete tool to interact with OneDrive on Linux, built following the UNIX philosophy. ## Features * State caching * Real-time file monitoring with inotify * File upload / download validation to ensure data integrity * Resumable uploads * Support for OneDrive for Business (part of Office 365) * Shared folders (OneDrive Personal) * SharePoint / Office 365 Shared Libraries (refer to docs/Office365.md to configure) * Desktop notifications via libnotify ## What's missing * While local changes are uploaded right away, remote changes are delayed * No GUI ## Building and Installation See [docs/INSTALL.md](https://github.com/abraunegg/onedrive/blob/master/docs/INSTALL.md) ## Configuration and Usage See [docs/USAGE.md](https://github.com/abraunegg/onedrive/blob/master/docs/USAGE.md) ## Docker support See [docs/Docker.md](https://github.com/abraunegg/onedrive/blob/master/docs/Docker.md) ## SharePoint group drive in Office 365 business or education See [docs/Office365.md](https://github.com/abraunegg/onedrive/blob/master/docs/Office365.md) ## Reporting issues If you encounter any bugs, you can report them here on GitHub. Before filing an issue, be sure to: 1. Check the version of the application you are using with `onedrive --version` and ensure that you are running either the latest [release](https://github.com/abraunegg/onedrive/releases) or a build from master. 2. Fill in a new bug report using the [issue template](https://github.com/abraunegg/onedrive/issues/new?template=bug_report.md) 3. Generate a debug log for support using the following [process](https://github.com/abraunegg/onedrive/wiki/Generate-debug-log-for-support) 4. Upload the debug log to [pastebin](https://pastebin.com/), or archive it and email it to support@mynas.com.au onedrive-2.3.13/aclocal.m4000066400000000000000000000252401360252424000152540ustar00rootroot00000000000000# generated automatically by aclocal 1.16.1 -*- Autoconf -*- # Copyright (C) 1996-2018 Free Software Foundation, Inc. # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. m4_ifndef([AC_CONFIG_MACRO_DIRS], [m4_defun([_AM_CONFIG_MACRO_DIRS], [])m4_defun([AC_CONFIG_MACRO_DIRS], [_AM_CONFIG_MACRO_DIRS($@)])]) dnl pkg.m4 - Macros to locate and utilise pkg-config. -*- Autoconf -*- dnl serial 11 (pkg-config-0.29) dnl dnl Copyright © 2004 Scott James Remnant .
dnl Copyright © 2012-2015 Dan Nicholson dnl dnl This program is free software; you can redistribute it and/or modify dnl it under the terms of the GNU General Public License as published by dnl the Free Software Foundation; either version 2 of the License, or dnl (at your option) any later version. dnl dnl This program is distributed in the hope that it will be useful, but dnl WITHOUT ANY WARRANTY; without even the implied warranty of dnl MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU dnl General Public License for more details. dnl dnl You should have received a copy of the GNU General Public License dnl along with this program; if not, write to the Free Software dnl Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA dnl 02111-1307, USA. dnl dnl As a special exception to the GNU General Public License, if you dnl distribute this file as part of a program that contains a dnl configuration script generated by Autoconf, you may include it under dnl the same distribution terms that you use for the rest of that dnl program. dnl PKG_PREREQ(MIN-VERSION) dnl ----------------------- dnl Since: 0.29 dnl dnl Verify that the version of the pkg-config macros are at least dnl MIN-VERSION. Unlike PKG_PROG_PKG_CONFIG, which checks the user's dnl installed version of pkg-config, this checks the developer's version dnl of pkg.m4 when generating configure. dnl dnl To ensure that this macro is defined, also add: dnl m4_ifndef([PKG_PREREQ], dnl [m4_fatal([must install pkg-config 0.29 or later before running autoconf/autogen])]) dnl dnl See the "Since" comment for each macro you use to see what version dnl of the macros you require. m4_defun([PKG_PREREQ], [m4_define([PKG_MACROS_VERSION], [0.29]) m4_if(m4_version_compare(PKG_MACROS_VERSION, [$1]), -1, [m4_fatal([pkg.m4 version $1 or higher is required but ]PKG_MACROS_VERSION[ found])]) ])dnl PKG_PREREQ dnl PKG_PROG_PKG_CONFIG([MIN-VERSION]) dnl ---------------------------------- dnl Since: 0.16 dnl dnl Search for the pkg-config tool and set the PKG_CONFIG variable to dnl first found in the path. Checks that the version of pkg-config found dnl is at least MIN-VERSION. If MIN-VERSION is not specified, 0.9.0 is dnl used since that's the first version where most current features of dnl pkg-config existed. AC_DEFUN([PKG_PROG_PKG_CONFIG], [m4_pattern_forbid([^_?PKG_[A-Z_]+$]) m4_pattern_allow([^PKG_CONFIG(_(PATH|LIBDIR|SYSROOT_DIR|ALLOW_SYSTEM_(CFLAGS|LIBS)))?$]) m4_pattern_allow([^PKG_CONFIG_(DISABLE_UNINSTALLED|TOP_BUILD_DIR|DEBUG_SPEW)$]) AC_ARG_VAR([PKG_CONFIG], [path to pkg-config utility]) AC_ARG_VAR([PKG_CONFIG_PATH], [directories to add to pkg-config's search path]) AC_ARG_VAR([PKG_CONFIG_LIBDIR], [path overriding pkg-config's built-in search path]) if test "x$ac_cv_env_PKG_CONFIG_set" != "xset"; then AC_PATH_TOOL([PKG_CONFIG], [pkg-config]) fi if test -n "$PKG_CONFIG"; then _pkg_min_version=m4_default([$1], [0.9.0]) AC_MSG_CHECKING([pkg-config is at least version $_pkg_min_version]) if $PKG_CONFIG --atleast-pkgconfig-version $_pkg_min_version; then AC_MSG_RESULT([yes]) else AC_MSG_RESULT([no]) PKG_CONFIG="" fi fi[]dnl ])dnl PKG_PROG_PKG_CONFIG dnl PKG_CHECK_EXISTS(MODULES, [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND]) dnl ------------------------------------------------------------------- dnl Since: 0.18 dnl dnl Check to see whether a particular set of modules exists. Similar to dnl PKG_CHECK_MODULES(), but does not set variables or print errors. 
dnl dnl Please remember that m4 expands AC_REQUIRE([PKG_PROG_PKG_CONFIG]) dnl only at the first occurence in configure.ac, so if the first place dnl it's called might be skipped (such as if it is within an "if", you dnl have to call PKG_CHECK_EXISTS manually AC_DEFUN([PKG_CHECK_EXISTS], [AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl if test -n "$PKG_CONFIG" && \ AC_RUN_LOG([$PKG_CONFIG --exists --print-errors "$1"]); then m4_default([$2], [:]) m4_ifvaln([$3], [else $3])dnl fi]) dnl _PKG_CONFIG([VARIABLE], [COMMAND], [MODULES]) dnl --------------------------------------------- dnl Internal wrapper calling pkg-config via PKG_CONFIG and setting dnl pkg_failed based on the result. m4_define([_PKG_CONFIG], [if test -n "$$1"; then pkg_cv_[]$1="$$1" elif test -n "$PKG_CONFIG"; then PKG_CHECK_EXISTS([$3], [pkg_cv_[]$1=`$PKG_CONFIG --[]$2 "$3" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes ], [pkg_failed=yes]) else pkg_failed=untried fi[]dnl ])dnl _PKG_CONFIG dnl _PKG_SHORT_ERRORS_SUPPORTED dnl --------------------------- dnl Internal check to see if pkg-config supports short errors. AC_DEFUN([_PKG_SHORT_ERRORS_SUPPORTED], [AC_REQUIRE([PKG_PROG_PKG_CONFIG]) if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi[]dnl ])dnl _PKG_SHORT_ERRORS_SUPPORTED dnl PKG_CHECK_MODULES(VARIABLE-PREFIX, MODULES, [ACTION-IF-FOUND], dnl [ACTION-IF-NOT-FOUND]) dnl -------------------------------------------------------------- dnl Since: 0.4.0 dnl dnl Note that if there is a possibility the first call to dnl PKG_CHECK_MODULES might not happen, you should be sure to include an dnl explicit call to PKG_PROG_PKG_CONFIG in your configure.ac AC_DEFUN([PKG_CHECK_MODULES], [AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl AC_ARG_VAR([$1][_CFLAGS], [C compiler flags for $1, overriding pkg-config])dnl AC_ARG_VAR([$1][_LIBS], [linker flags for $1, overriding pkg-config])dnl pkg_failed=no AC_MSG_CHECKING([for $1]) _PKG_CONFIG([$1][_CFLAGS], [cflags], [$2]) _PKG_CONFIG([$1][_LIBS], [libs], [$2]) m4_define([_PKG_TEXT], [Alternatively, you may set the environment variables $1[]_CFLAGS and $1[]_LIBS to avoid the need to call pkg-config. See the pkg-config man page for more details.]) if test $pkg_failed = yes; then AC_MSG_RESULT([no]) _PKG_SHORT_ERRORS_SUPPORTED if test $_pkg_short_errors_supported = yes; then $1[]_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$2" 2>&1` else $1[]_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$2" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$$1[]_PKG_ERRORS" >&AS_MESSAGE_LOG_FD m4_default([$4], [AC_MSG_ERROR( [Package requirements ($2) were not met: $$1_PKG_ERRORS Consider adjusting the PKG_CONFIG_PATH environment variable if you installed software in a non-standard prefix. _PKG_TEXT])[]dnl ]) elif test $pkg_failed = untried; then AC_MSG_RESULT([no]) m4_default([$4], [AC_MSG_FAILURE( [The pkg-config script could not be found or is too old. Make sure it is in your PATH or set the PKG_CONFIG environment variable to the full path to pkg-config. 
_PKG_TEXT To get pkg-config, see .])[]dnl ]) else $1[]_CFLAGS=$pkg_cv_[]$1[]_CFLAGS $1[]_LIBS=$pkg_cv_[]$1[]_LIBS AC_MSG_RESULT([yes]) $3 fi[]dnl ])dnl PKG_CHECK_MODULES dnl PKG_CHECK_MODULES_STATIC(VARIABLE-PREFIX, MODULES, [ACTION-IF-FOUND], dnl [ACTION-IF-NOT-FOUND]) dnl --------------------------------------------------------------------- dnl Since: 0.29 dnl dnl Checks for existence of MODULES and gathers its build flags with dnl static libraries enabled. Sets VARIABLE-PREFIX_CFLAGS from --cflags dnl and VARIABLE-PREFIX_LIBS from --libs. dnl dnl Note that if there is a possibility the first call to dnl PKG_CHECK_MODULES_STATIC might not happen, you should be sure to dnl include an explicit call to PKG_PROG_PKG_CONFIG in your dnl configure.ac. AC_DEFUN([PKG_CHECK_MODULES_STATIC], [AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl _save_PKG_CONFIG=$PKG_CONFIG PKG_CONFIG="$PKG_CONFIG --static" PKG_CHECK_MODULES($@) PKG_CONFIG=$_save_PKG_CONFIG[]dnl ])dnl PKG_CHECK_MODULES_STATIC dnl PKG_INSTALLDIR([DIRECTORY]) dnl ------------------------- dnl Since: 0.27 dnl dnl Substitutes the variable pkgconfigdir as the location where a module dnl should install pkg-config .pc files. By default the directory is dnl $libdir/pkgconfig, but the default can be changed by passing dnl DIRECTORY. The user can override through the --with-pkgconfigdir dnl parameter. AC_DEFUN([PKG_INSTALLDIR], [m4_pushdef([pkg_default], [m4_default([$1], ['${libdir}/pkgconfig'])]) m4_pushdef([pkg_description], [pkg-config installation directory @<:@]pkg_default[@:>@]) AC_ARG_WITH([pkgconfigdir], [AS_HELP_STRING([--with-pkgconfigdir], pkg_description)],, [with_pkgconfigdir=]pkg_default) AC_SUBST([pkgconfigdir], [$with_pkgconfigdir]) m4_popdef([pkg_default]) m4_popdef([pkg_description]) ])dnl PKG_INSTALLDIR dnl PKG_NOARCH_INSTALLDIR([DIRECTORY]) dnl -------------------------------- dnl Since: 0.27 dnl dnl Substitutes the variable noarch_pkgconfigdir as the location where a dnl module should install arch-independent pkg-config .pc files. By dnl default the directory is $datadir/pkgconfig, but the default can be dnl changed by passing DIRECTORY. The user can override through the dnl --with-noarch-pkgconfigdir parameter. AC_DEFUN([PKG_NOARCH_INSTALLDIR], [m4_pushdef([pkg_default], [m4_default([$1], ['${datadir}/pkgconfig'])]) m4_pushdef([pkg_description], [pkg-config arch-independent installation directory @<:@]pkg_default[@:>@]) AC_ARG_WITH([noarch-pkgconfigdir], [AS_HELP_STRING([--with-noarch-pkgconfigdir], pkg_description)],, [with_noarch_pkgconfigdir=]pkg_default) AC_SUBST([noarch_pkgconfigdir], [$with_noarch_pkgconfigdir]) m4_popdef([pkg_default]) m4_popdef([pkg_description]) ])dnl PKG_NOARCH_INSTALLDIR dnl PKG_CHECK_VAR(VARIABLE, MODULE, CONFIG-VARIABLE, dnl [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND]) dnl ------------------------------------------- dnl Since: 0.28 dnl dnl Retrieves the value of the pkg-config variable for the given module. AC_DEFUN([PKG_CHECK_VAR], [AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl AC_ARG_VAR([$1], [value of $3 for $2, overriding pkg-config])dnl _PKG_CONFIG([$1], [variable="][$3]["], [$2]) AS_VAR_COPY([$1], [pkg_cv_][$1]) AS_VAR_IF([$1], [""], [$5], [$4])dnl ])dnl PKG_CHECK_VAR onedrive-2.3.13/config000066400000000000000000000020041360252424000145750ustar00rootroot00000000000000# Configuration for OneDrive Linux Client # This file contains the list of supported configuration fields # with their default values. 
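# For example (the two values below are illustrative assumptions, not
# recommendations), a customised setup might use:
# sync_dir = "~/OneDrive/Work"
# monitor_interval = "300"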
# All values need to be enclosed in quotes # When changing a config option below, remove the '#' from the start of the line # For explanations of all config options below see docs/USAGE.md or the man page. # # sync_dir = "~/OneDrive" # skip_file = "~*|.~*|*.tmp" # monitor_interval = "45" # skip_dir = "" # log_dir = "/var/log/onedrive/" # drive_id = "" # upload_only = "false" # check_nomount = "false" # check_nosync = "false" # download_only = "false" # disable_notifications = "false" # disable_upload_validation = "false" # enable_logging = "false" # force_http_11 = "false" # force_http_2 = "false" # local_first = "false" # no_remote_delete = "false" # skip_symlinks = "false" # debug_https = "false" # skip_dotfiles = "false" # dry_run = "false" # min_notify_changes = "5" # monitor_log_frequency = "5" # monitor_fullscan_frequency = "10" # sync_root_files = "false" # user_agent = "" onedrive-2.3.13/configure000077500000000000000000003311121360252424000153210ustar00rootroot00000000000000#! /bin/sh # Guess values for system-dependent variables and create Makefiles. # Generated by GNU Autoconf 2.69 for onedrive v2.3.13. # # Report bugs to . # # # Copyright (C) 1992-1996, 1998-2012 Free Software Foundation, Inc. # # # This configure script is free software; the Free Software Foundation # gives unlimited permission to copy, distribute and modify it. ## -------------------- ## ## M4sh Initialization. ## ## -------------------- ## # Be more Bourne compatible DUALCASE=1; export DUALCASE # for MKS sh if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' setopt NO_GLOB_SUBST else case `(set -o) 2>/dev/null` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi as_nl=' ' export as_nl # Printing a long string crashes Solaris 7 /usr/bin/printf. as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo # Prefer a ksh shell builtin over an external printf program on Solaris, # but without wasting forks for bash or zsh. if test -z "$BASH_VERSION$ZSH_VERSION" \ && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='print -r --' as_echo_n='print -rn --' elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='printf %s\n' as_echo_n='printf %s' else if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' as_echo_n='/usr/ucb/echo -n' else as_echo_body='eval expr "X$1" : "X\\(.*\\)"' as_echo_n_body='eval arg=$1; case $arg in #( *"$as_nl"*) expr "X$arg" : "X\\(.*\\)$as_nl"; arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; esac; expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" ' export as_echo_n_body as_echo_n='sh -c $as_echo_n_body as_echo' fi export as_echo_body as_echo='sh -c $as_echo_body as_echo' fi # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || PATH_SEPARATOR=';' } fi # IFS # We need space, tab and new line, in precisely that order. Quoting is # there to prevent editors from complaining about space-tab. 
# (If _AS_PATH_WALK were called with IFS unset, it would disable word # splitting by setting IFS to empty value.) IFS=" "" $as_nl" # Find who we are. Look in the path if we contain no directory separator. as_myself= case $0 in #(( *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break done IFS=$as_save_IFS ;; esac # We did not find ourselves, most probably we were run as `sh COMMAND' # in which case we are not to be found in the path. if test "x$as_myself" = x; then as_myself=$0 fi if test ! -f "$as_myself"; then $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 exit 1 fi # Unset variables that we do not need and which cause bugs (e.g. in # pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" # suppresses any "Segmentation fault" message there. '((' could # trigger a bug in pdksh 5.2.14. for as_var in BASH_ENV ENV MAIL MAILPATH do eval test x\${$as_var+set} = xset \ && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : done PS1='$ ' PS2='> ' PS4='+ ' # NLS nuisances. LC_ALL=C export LC_ALL LANGUAGE=C export LANGUAGE # CDPATH. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH # Use a proper internal environment variable to ensure we don't fall # into an infinite loop, continuously re-executing ourselves. if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then _as_can_reexec=no; export _as_can_reexec; # We cannot yet assume a decent shell, so we have to provide a # neutralization value for shells without unset; and this also # works around shells that cannot unset nonexistent variables. # Preserve -v and -x to the replacement shell. BASH_ENV=/dev/null ENV=/dev/null (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV case $- in # (((( *v*x* | *x*v* ) as_opts=-vx ;; *v* ) as_opts=-v ;; *x* ) as_opts=-x ;; * ) as_opts= ;; esac exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} # Admittedly, this is quite paranoid, since all the known shells bail # out after a failed `exec'. $as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 as_fn_exit 255 fi # We don't want this to propagate to other subprocesses. { _as_can_reexec=; unset _as_can_reexec;} if test "x$CONFIG_SHELL" = x; then as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which # is contrary to our usage. Disable this feature. alias -g '\${1+\"\$@\"}'='\"\$@\"' setopt NO_GLOB_SUBST else case \`(set -o) 2>/dev/null\` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi " as_required="as_fn_return () { (exit \$1); } as_fn_success () { as_fn_return 0; } as_fn_failure () { as_fn_return 1; } as_fn_ret_success () { return 0; } as_fn_ret_failure () { return 1; } exitcode=0 as_fn_success || { exitcode=1; echo as_fn_success failed.; } as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then : else exitcode=1; echo positional parameters were not saved. 
fi test x\$exitcode = x0 || exit 1 test -x / || exit 1" as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1" if (eval "$as_required") 2>/dev/null; then : as_have_required=yes else as_have_required=no fi if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then : else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR as_found=false for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. as_found=: case $as_dir in #( /*) for as_base in sh bash ksh sh5; do # Try only shells that exist, to save several forks. as_shell=$as_dir/$as_base if { test -f "$as_shell" || test -f "$as_shell.exe"; } && { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then : CONFIG_SHELL=$as_shell as_have_required=yes if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then : break 2 fi fi done;; esac as_found=false done $as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } && { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then : CONFIG_SHELL=$SHELL as_have_required=yes fi; } IFS=$as_save_IFS if test "x$CONFIG_SHELL" != x; then : export CONFIG_SHELL # We cannot yet assume a decent shell, so we have to provide a # neutralization value for shells without unset; and this also # works around shells that cannot unset nonexistent variables. # Preserve -v and -x to the replacement shell. BASH_ENV=/dev/null ENV=/dev/null (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV case $- in # (((( *v*x* | *x*v* ) as_opts=-vx ;; *v* ) as_opts=-v ;; *x* ) as_opts=-x ;; * ) as_opts= ;; esac exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} # Admittedly, this is quite paranoid, since all the known shells bail # out after a failed `exec'. $as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 exit 255 fi if test x$as_have_required = xno; then : $as_echo "$0: This script requires a shell more modern than all" $as_echo "$0: the shells that I found on your system." if test x${ZSH_VERSION+set} = xset ; then $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should" $as_echo "$0: be upgraded to zsh 4.3.4 or later." else $as_echo "$0: Please tell bug-autoconf@gnu.org and $0: https://github.com/abraunegg/onedrive about your $0: system, including any error possibly output before this $0: message. Then install a modern shell, or manually run $0: the script under such a shell if you do have one." fi exit 1 fi fi fi SHELL=${CONFIG_SHELL-/bin/sh} export SHELL # Unset more variables known to interfere with behavior of common tools. CLICOLOR_FORCE= GREP_OPTIONS= unset CLICOLOR_FORCE GREP_OPTIONS ## --------------------- ## ## M4sh Shell Functions. ## ## --------------------- ## # as_fn_unset VAR # --------------- # Portably unset VAR. as_fn_unset () { { eval $1=; unset $1;} } as_unset=as_fn_unset # as_fn_set_status STATUS # ----------------------- # Set $? to STATUS, without forking. as_fn_set_status () { return $1 } # as_fn_set_status # as_fn_exit STATUS # ----------------- # Exit the shell with STATUS, even in a "trap 0" or "set -e" context. 
as_fn_exit () { set +e as_fn_set_status $1 exit $1 } # as_fn_exit # as_fn_mkdir_p # ------------- # Create "$as_dir" as a directory, including parents if necessary. as_fn_mkdir_p () { case $as_dir in #( -*) as_dir=./$as_dir;; esac test -d "$as_dir" || eval $as_mkdir_p || { as_dirs= while :; do case $as_dir in #( *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( *) as_qdir=$as_dir;; esac as_dirs="'$as_qdir' $as_dirs" as_dir=`$as_dirname -- "$as_dir" || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` test -d "$as_dir" && break done test -z "$as_dirs" || eval "mkdir $as_dirs" } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" } # as_fn_mkdir_p # as_fn_executable_p FILE # ----------------------- # Test if FILE is an executable regular file. as_fn_executable_p () { test -f "$1" && test -x "$1" } # as_fn_executable_p # as_fn_append VAR VALUE # ---------------------- # Append the text in VALUE to the end of the definition contained in VAR. Take # advantage of any shell optimizations that allow amortized linear growth over # repeated appends, instead of the typical quadratic growth present in naive # implementations. if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : eval 'as_fn_append () { eval $1+=\$2 }' else as_fn_append () { eval $1=\$$1\$2 } fi # as_fn_append # as_fn_arith ARG... # ------------------ # Perform arithmetic evaluation on the ARGs, and store the result in the # global $as_val. Take advantage of shells that can avoid forks. The arguments # must be portable across $(()) and expr. if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : eval 'as_fn_arith () { as_val=$(( $* )) }' else as_fn_arith () { as_val=`expr "$@" || test $? -eq 1` } fi # as_fn_arith # as_fn_error STATUS ERROR [LINENO LOG_FD] # ---------------------------------------- # Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are # provided, also output the error to LOG_FD, referencing LINENO. Then exit the # script with STATUS, using 1 if that was 0. as_fn_error () { as_status=$1; test $as_status -eq 0 && as_status=1 if test "$4"; then as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 fi $as_echo "$as_me: error: $2" >&2 as_fn_exit $as_status } # as_fn_error if expr a : '\(a\)' >/dev/null 2>&1 && test "X`expr 00001 : '.*\(...\)'`" = X001; then as_expr=expr else as_expr=false fi if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then as_basename=basename else as_basename=false fi if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then as_dirname=dirname else as_dirname=false fi as_me=`$as_basename -- "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)' \| . 2>/dev/null || $as_echo X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/ q } /^X\/\(\/\/\)$/{ s//\1/ q } /^X\/\(\/\).*/{ s//\1/ q } s/.*/./; q'` # Avoid depending upon Character Ranges. 
as_cr_letters='abcdefghijklmnopqrstuvwxyz' as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' as_cr_Letters=$as_cr_letters$as_cr_LETTERS as_cr_digits='0123456789' as_cr_alnum=$as_cr_Letters$as_cr_digits as_lineno_1=$LINENO as_lineno_1a=$LINENO as_lineno_2=$LINENO as_lineno_2a=$LINENO eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { # Blame Lee E. McMahon (1931-1989) for sed's syntax. :-) sed -n ' p /[$]LINENO/= ' <$as_myself | sed ' s/[$]LINENO.*/&-/ t lineno b :lineno N :loop s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ t loop s/-\n.*// ' >$as_me.lineno && chmod +x "$as_me.lineno" || { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } # If we had to re-execute with $CONFIG_SHELL, we're ensured to have # already done that, so ensure we don't try to do so again and fall # in an infinite loop. This has already happened in practice. _as_can_reexec=no; export _as_can_reexec # Don't try to exec as it changes $[0], causing all sort of problems # (the dirname of $[0] is not the place where we might find the # original and so on. Autoconf is especially sensitive to this). . "./$as_me.lineno" # Exit status is that of the last command. exit } ECHO_C= ECHO_N= ECHO_T= case `echo -n x` in #((((( -n*) case `echo 'xy\c'` in *c*) ECHO_T=' ';; # ECHO_T is single tab character. xy) ECHO_C='\c';; *) echo `echo ksh88 bug on AIX 6.1` > /dev/null ECHO_T=' ';; esac;; *) ECHO_N='-n';; esac rm -f conf$$ conf$$.exe conf$$.file if test -d conf$$.dir; then rm -f conf$$.dir/conf$$.file else rm -f conf$$.dir mkdir conf$$.dir 2>/dev/null fi if (echo >conf$$.file) 2>/dev/null; then if ln -s conf$$.file conf$$ 2>/dev/null; then as_ln_s='ln -s' # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. # In both cases, we have to default to `cp -pR'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || as_ln_s='cp -pR' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else as_ln_s='cp -pR' fi else as_ln_s='cp -pR' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null if mkdir -p . 2>/dev/null; then as_mkdir_p='mkdir -p "$as_dir"' else test -d ./-p && rmdir ./-p as_mkdir_p=false fi as_test_x='test -x' as_executable_p=as_fn_executable_p # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" # Sed expression to map a string onto a valid variable name. as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" test -n "$DJDIR" || exec 7<&0 &1 # Name of the host. # hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status, # so uname gets run too. ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` # # Initializations. # ac_default_prefix=/usr/local ac_clean_files= ac_config_libobj_dir=. LIBOBJS= cross_compiling=no subdirs= MFLAGS= MAKEFLAGS= # Identity of this package. 
PACKAGE_NAME='onedrive' PACKAGE_TARNAME='onedrive' PACKAGE_VERSION='v2.3.13' PACKAGE_STRING='onedrive v2.3.13' PACKAGE_BUGREPORT='https://github.com/abraunegg/onedrive' PACKAGE_URL='' ac_unique_file="src/main.d" ac_subst_vars='LTLIBOBJS LIBOBJS DEBUG ZSH_COMPLETION_DIR BASH_COMPLETION_DIR bashcompdir COMPLETIONS NOTIFICATIONS notify_LIBS notify_CFLAGS HAVE_SYSTEMD systemduserunitdir systemdsystemunitdir sqlite_LIBS sqlite_CFLAGS curl_LIBS curl_CFLAGS PACKAGE_DATE DC_TYPE PKG_CONFIG_LIBDIR PKG_CONFIG_PATH PKG_CONFIG INSTALL_DATA INSTALL_SCRIPT INSTALL_PROGRAM DCFLAGS DC target_alias host_alias build_alias LIBS ECHO_T ECHO_N ECHO_C DEFS mandir localedir libdir psdir pdfdir dvidir htmldir infodir docdir oldincludedir includedir localstatedir sharedstatedir sysconfdir datadir datarootdir libexecdir sbindir bindir program_transform_name prefix exec_prefix PACKAGE_URL PACKAGE_BUGREPORT PACKAGE_STRING PACKAGE_VERSION PACKAGE_TARNAME PACKAGE_NAME PATH_SEPARATOR SHELL' ac_subst_files='' ac_user_opts=' enable_option_checking enable_version_check with_systemdsystemunitdir with_systemduserunitdir enable_notifications enable_completions with_bash_completion_dir with_zsh_completion_dir enable_debug ' ac_precious_vars='build_alias host_alias target_alias DC DCFLAGS PKG_CONFIG PKG_CONFIG_PATH PKG_CONFIG_LIBDIR curl_CFLAGS curl_LIBS sqlite_CFLAGS sqlite_LIBS notify_CFLAGS notify_LIBS bashcompdir' # Initialize some variables set by options. ac_init_help= ac_init_version=false ac_unrecognized_opts= ac_unrecognized_sep= # The variables have the same names as the options, with # dashes changed to underlines. cache_file=/dev/null exec_prefix=NONE no_create= no_recursion= prefix=NONE program_prefix=NONE program_suffix=NONE program_transform_name=s,x,x, silent= site= srcdir= verbose= x_includes=NONE x_libraries=NONE # Installation directory options. # These are left unexpanded so users can "make install exec_prefix=/foo" # and all the variables that are supposed to be based on exec_prefix # by default will actually change. # Use braces instead of parens because sh, perl, etc. also accept them. # (The list follows the same order as the GNU Coding Standards.) bindir='${exec_prefix}/bin' sbindir='${exec_prefix}/sbin' libexecdir='${exec_prefix}/libexec' datarootdir='${prefix}/share' datadir='${datarootdir}' sysconfdir='${prefix}/etc' sharedstatedir='${prefix}/com' localstatedir='${prefix}/var' includedir='${prefix}/include' oldincludedir='/usr/include' docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' infodir='${datarootdir}/info' htmldir='${docdir}' dvidir='${docdir}' pdfdir='${docdir}' psdir='${docdir}' libdir='${exec_prefix}/lib' localedir='${datarootdir}/locale' mandir='${datarootdir}/man' ac_prev= ac_dashdash= for ac_option do # If the previous option needs an argument, assign it. if test -n "$ac_prev"; then eval $ac_prev=\$ac_option ac_prev= continue fi case $ac_option in *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; *=) ac_optarg= ;; *) ac_optarg=yes ;; esac # Accept the important Cygnus configure options, so we can diagnose typos. 
case $ac_dashdash$ac_option in --) ac_dashdash=yes ;; -bindir | --bindir | --bindi | --bind | --bin | --bi) ac_prev=bindir ;; -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) bindir=$ac_optarg ;; -build | --build | --buil | --bui | --bu) ac_prev=build_alias ;; -build=* | --build=* | --buil=* | --bui=* | --bu=*) build_alias=$ac_optarg ;; -cache-file | --cache-file | --cache-fil | --cache-fi \ | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) ac_prev=cache_file ;; -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) cache_file=$ac_optarg ;; --config-cache | -C) cache_file=config.cache ;; -datadir | --datadir | --datadi | --datad) ac_prev=datadir ;; -datadir=* | --datadir=* | --datadi=* | --datad=*) datadir=$ac_optarg ;; -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ | --dataroo | --dataro | --datar) ac_prev=datarootdir ;; -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) datarootdir=$ac_optarg ;; -disable-* | --disable-*) ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid feature name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "enable_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval enable_$ac_useropt=no ;; -docdir | --docdir | --docdi | --doc | --do) ac_prev=docdir ;; -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) docdir=$ac_optarg ;; -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) ac_prev=dvidir ;; -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) dvidir=$ac_optarg ;; -enable-* | --enable-*) ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid feature name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "enable_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval enable_$ac_useropt=\$ac_optarg ;; -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ | --exec | --exe | --ex) ac_prev=exec_prefix ;; -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ | --exec=* | --exe=* | --ex=*) exec_prefix=$ac_optarg ;; -gas | --gas | --ga | --g) # Obsolete; use --with-gas. 
with_gas=yes ;; -help | --help | --hel | --he | -h) ac_init_help=long ;; -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) ac_init_help=recursive ;; -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) ac_init_help=short ;; -host | --host | --hos | --ho) ac_prev=host_alias ;; -host=* | --host=* | --hos=* | --ho=*) host_alias=$ac_optarg ;; -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) ac_prev=htmldir ;; -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ | --ht=*) htmldir=$ac_optarg ;; -includedir | --includedir | --includedi | --included | --include \ | --includ | --inclu | --incl | --inc) ac_prev=includedir ;; -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ | --includ=* | --inclu=* | --incl=* | --inc=*) includedir=$ac_optarg ;; -infodir | --infodir | --infodi | --infod | --info | --inf) ac_prev=infodir ;; -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) infodir=$ac_optarg ;; -libdir | --libdir | --libdi | --libd) ac_prev=libdir ;; -libdir=* | --libdir=* | --libdi=* | --libd=*) libdir=$ac_optarg ;; -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ | --libexe | --libex | --libe) ac_prev=libexecdir ;; -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ | --libexe=* | --libex=* | --libe=*) libexecdir=$ac_optarg ;; -localedir | --localedir | --localedi | --localed | --locale) ac_prev=localedir ;; -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) localedir=$ac_optarg ;; -localstatedir | --localstatedir | --localstatedi | --localstated \ | --localstate | --localstat | --localsta | --localst | --locals) ac_prev=localstatedir ;; -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) localstatedir=$ac_optarg ;; -mandir | --mandir | --mandi | --mand | --man | --ma | --m) ac_prev=mandir ;; -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) mandir=$ac_optarg ;; -nfp | --nfp | --nf) # Obsolete; use --without-fp. 
with_fp=no ;; -no-create | --no-create | --no-creat | --no-crea | --no-cre \ | --no-cr | --no-c | -n) no_create=yes ;; -no-recursion | --no-recursion | --no-recursio | --no-recursi \ | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) no_recursion=yes ;; -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ | --oldin | --oldi | --old | --ol | --o) ac_prev=oldincludedir ;; -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) oldincludedir=$ac_optarg ;; -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) ac_prev=prefix ;; -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) prefix=$ac_optarg ;; -program-prefix | --program-prefix | --program-prefi | --program-pref \ | --program-pre | --program-pr | --program-p) ac_prev=program_prefix ;; -program-prefix=* | --program-prefix=* | --program-prefi=* \ | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) program_prefix=$ac_optarg ;; -program-suffix | --program-suffix | --program-suffi | --program-suff \ | --program-suf | --program-su | --program-s) ac_prev=program_suffix ;; -program-suffix=* | --program-suffix=* | --program-suffi=* \ | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) program_suffix=$ac_optarg ;; -program-transform-name | --program-transform-name \ | --program-transform-nam | --program-transform-na \ | --program-transform-n | --program-transform- \ | --program-transform | --program-transfor \ | --program-transfo | --program-transf \ | --program-trans | --program-tran \ | --progr-tra | --program-tr | --program-t) ac_prev=program_transform_name ;; -program-transform-name=* | --program-transform-name=* \ | --program-transform-nam=* | --program-transform-na=* \ | --program-transform-n=* | --program-transform-=* \ | --program-transform=* | --program-transfor=* \ | --program-transfo=* | --program-transf=* \ | --program-trans=* | --program-tran=* \ | --progr-tra=* | --program-tr=* | --program-t=*) program_transform_name=$ac_optarg ;; -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) ac_prev=pdfdir ;; -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) pdfdir=$ac_optarg ;; -psdir | --psdir | --psdi | --psd | --ps) ac_prev=psdir ;; -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) psdir=$ac_optarg ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil) silent=yes ;; -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) ac_prev=sbindir ;; -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ | --sbi=* | --sb=*) sbindir=$ac_optarg ;; -sharedstatedir | --sharedstatedir | --sharedstatedi \ | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ | --sharedst | --shareds | --shared | --share | --shar \ | --sha | --sh) ac_prev=sharedstatedir ;; -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \ | --sha=* | --sh=*) sharedstatedir=$ac_optarg ;; -site | --site | --sit) ac_prev=site ;; -site=* | --site=* | --sit=*) site=$ac_optarg ;; -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) ac_prev=srcdir ;; -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) srcdir=$ac_optarg ;; 
-sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ | --syscon | --sysco | --sysc | --sys | --sy) ac_prev=sysconfdir ;; -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) sysconfdir=$ac_optarg ;; -target | --target | --targe | --targ | --tar | --ta | --t) ac_prev=target_alias ;; -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) target_alias=$ac_optarg ;; -v | -verbose | --verbose | --verbos | --verbo | --verb) verbose=yes ;; -version | --version | --versio | --versi | --vers | -V) ac_init_version=: ;; -with-* | --with-*) ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid package name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "with_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval with_$ac_useropt=\$ac_optarg ;; -without-* | --without-*) ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid package name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "with_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval with_$ac_useropt=no ;; --x) # Obsolete; use --with-x. with_x=yes ;; -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ | --x-incl | --x-inc | --x-in | --x-i) ac_prev=x_includes ;; -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) x_includes=$ac_optarg ;; -x-libraries | --x-libraries | --x-librarie | --x-librari \ | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) ac_prev=x_libraries ;; -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) x_libraries=$ac_optarg ;; -*) as_fn_error $? "unrecognized option: \`$ac_option' Try \`$0 --help' for more information" ;; *=*) ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` # Reject names that are not valid shell variable names. case $ac_envvar in #( '' | [0-9]* | *[!_$as_cr_alnum]* ) as_fn_error $? "invalid variable name: \`$ac_envvar'" ;; esac eval $ac_envvar=\$ac_optarg export $ac_envvar ;; *) # FIXME: should be removed in autoconf 3.0. $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2 expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2 : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}" ;; esac done if test -n "$ac_prev"; then ac_option=--`echo $ac_prev | sed 's/_/-/g'` as_fn_error $? "missing argument to $ac_option" fi if test -n "$ac_unrecognized_opts"; then case $enable_option_checking in no) ;; fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;; *) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; esac fi # Check all directory arguments for consistency. 
for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ datadir sysconfdir sharedstatedir localstatedir includedir \ oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ libdir localedir mandir do eval ac_val=\$$ac_var # Remove trailing slashes. case $ac_val in */ ) ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` eval $ac_var=\$ac_val;; esac # Be sure to have absolute directory names. case $ac_val in [\\/$]* | ?:[\\/]* ) continue;; NONE | '' ) case $ac_var in *prefix ) continue;; esac;; esac as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val" done # There might be people who depend on the old broken behavior: `$host' # used to hold the argument of --host etc. # FIXME: To remove some day. build=$build_alias host=$host_alias target=$target_alias # FIXME: To remove some day. if test "x$host_alias" != x; then if test "x$build_alias" = x; then cross_compiling=maybe elif test "x$build_alias" != "x$host_alias"; then cross_compiling=yes fi fi ac_tool_prefix= test -n "$host_alias" && ac_tool_prefix=$host_alias- test "$silent" = yes && exec 6>/dev/null ac_pwd=`pwd` && test -n "$ac_pwd" && ac_ls_di=`ls -di .` && ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || as_fn_error $? "working directory cannot be determined" test "X$ac_ls_di" = "X$ac_pwd_ls_di" || as_fn_error $? "pwd does not report name of working directory" # Find the source files, if location was not specified. if test -z "$srcdir"; then ac_srcdir_defaulted=yes # Try the directory containing this script, then the parent directory. ac_confdir=`$as_dirname -- "$as_myself" || $as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_myself" : 'X\(//\)[^/]' \| \ X"$as_myself" : 'X\(//\)$' \| \ X"$as_myself" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$as_myself" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` srcdir=$ac_confdir if test ! -r "$srcdir/$ac_unique_file"; then srcdir=.. fi else ac_srcdir_defaulted=no fi if test ! -r "$srcdir/$ac_unique_file"; then test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir" fi ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" ac_abs_confdir=`( cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg" pwd)` # When building in place, set srcdir=. if test "$ac_abs_confdir" = "$ac_pwd"; then srcdir=. fi # Remove unnecessary trailing slashes from srcdir. # Double slashes in file names in object file debugging info # mess up M-x gdb in Emacs. case $srcdir in */) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; esac for ac_var in $ac_precious_vars; do eval ac_env_${ac_var}_set=\${${ac_var}+set} eval ac_env_${ac_var}_value=\$${ac_var} eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} eval ac_cv_env_${ac_var}_value=\$${ac_var} done # # Report the --help message. # if test "$ac_init_help" = "long"; then # Omit some internal or obsolete options to make the list less imposing. # This message is too long to be a string in the A/UX 3.1 sh. cat <<_ACEOF \`configure' configures onedrive v2.3.13 to adapt to many kinds of systems. Usage: $0 [OPTION]... [VAR=VALUE]... To assign environment variables (e.g., CC, CFLAGS...), specify them as VAR=VALUE. See below for descriptions of some of the useful variables. Defaults for the options are specified in brackets. 
Configuration: -h, --help display this help and exit --help=short display options specific to this package --help=recursive display the short help of all the included packages -V, --version display version information and exit -q, --quiet, --silent do not print \`checking ...' messages --cache-file=FILE cache test results in FILE [disabled] -C, --config-cache alias for \`--cache-file=config.cache' -n, --no-create do not create output files --srcdir=DIR find the sources in DIR [configure dir or \`..'] Installation directories: --prefix=PREFIX install architecture-independent files in PREFIX [$ac_default_prefix] --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX [PREFIX] By default, \`make install' will install all the files in \`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify an installation prefix other than \`$ac_default_prefix' using \`--prefix', for instance \`--prefix=\$HOME'. For better control, use the options below. Fine tuning of the installation directories: --bindir=DIR user executables [EPREFIX/bin] --sbindir=DIR system admin executables [EPREFIX/sbin] --libexecdir=DIR program executables [EPREFIX/libexec] --sysconfdir=DIR read-only single-machine data [PREFIX/etc] --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] --localstatedir=DIR modifiable single-machine data [PREFIX/var] --libdir=DIR object code libraries [EPREFIX/lib] --includedir=DIR C header files [PREFIX/include] --oldincludedir=DIR C header files for non-gcc [/usr/include] --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] --datadir=DIR read-only architecture-independent data [DATAROOTDIR] --infodir=DIR info documentation [DATAROOTDIR/info] --localedir=DIR locale-dependent data [DATAROOTDIR/locale] --mandir=DIR man documentation [DATAROOTDIR/man] --docdir=DIR documentation root [DATAROOTDIR/doc/onedrive] --htmldir=DIR html documentation [DOCDIR] --dvidir=DIR dvi documentation [DOCDIR] --pdfdir=DIR pdf documentation [DOCDIR] --psdir=DIR ps documentation [DOCDIR] _ACEOF cat <<\_ACEOF _ACEOF fi if test -n "$ac_init_help"; then case $ac_init_help in short | recursive ) echo "Configuration of onedrive v2.3.13:";; esac cat <<\_ACEOF Optional Features: --disable-option-checking ignore unrecognized --enable/--with options --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no) --enable-FEATURE[=ARG] include FEATURE [ARG=yes] --disable-version-check Disable checks of compiler version during configure time --enable-notifications Enable desktop notifications via libnotify --enable-completions Install shell completions for bash and zsh --enable-debug Pass debug option to the compiler Optional Packages: --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) --with-systemdsystemunitdir=DIR Directory for systemd system service files --with-systemduserunitdir=DIR Directory for systemd user service files --with-bash-completion-dir=DIR Directory for bash completion files --with-zsh-completion-dir=DIR Directory for zsh completion files Some influential environment variables: DC D compiler executable DCFLAGS flags for D compiler PKG_CONFIG path to pkg-config utility PKG_CONFIG_PATH directories to add to pkg-config's search path PKG_CONFIG_LIBDIR path overriding pkg-config's built-in search path curl_CFLAGS C compiler flags for curl, overriding pkg-config curl_LIBS linker flags for curl, overriding pkg-config sqlite_CFLAGS C compiler flags for sqlite, overriding pkg-config sqlite_LIBS 
linker flags for sqlite, overriding pkg-config notify_CFLAGS C compiler flags for notify, overriding pkg-config notify_LIBS linker flags for notify, overriding pkg-config bashcompdir value of completionsdir for bash-completion, overriding pkg-config Use these variables to override the choices made by `configure' or to help it to find libraries and programs with nonstandard names/locations. Report bugs to . _ACEOF ac_status=$? fi if test "$ac_init_help" = "recursive"; then # If there are subdirs, report their specific --help. for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue test -d "$ac_dir" || { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } || continue ac_builddir=. case "$ac_dir" in .) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` # A ".." for each directory in $ac_dir_suffix. ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` case $ac_top_builddir_sub in "") ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; esac ;; esac ac_abs_top_builddir=$ac_pwd ac_abs_builddir=$ac_pwd$ac_dir_suffix # for backward compatibility: ac_top_builddir=$ac_top_build_prefix case $srcdir in .) # We are building in place. ac_srcdir=. ac_top_srcdir=$ac_top_builddir_sub ac_abs_top_srcdir=$ac_pwd ;; [\\/]* | ?:[\\/]* ) # Absolute name. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ac_abs_top_srcdir=$srcdir ;; *) # Relative name. ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_build_prefix$srcdir ac_abs_top_srcdir=$ac_pwd/$srcdir ;; esac ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix cd "$ac_dir" || { ac_status=$?; continue; } # Check for guested configure. if test -f "$ac_srcdir/configure.gnu"; then echo && $SHELL "$ac_srcdir/configure.gnu" --help=recursive elif test -f "$ac_srcdir/configure"; then echo && $SHELL "$ac_srcdir/configure" --help=recursive else $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 fi || ac_status=$? cd "$ac_pwd" || { ac_status=$?; break; } done fi test -n "$ac_init_help" && exit $ac_status if $ac_init_version; then cat <<\_ACEOF onedrive configure v2.3.13 generated by GNU Autoconf 2.69 Copyright (C) 2012 Free Software Foundation, Inc. This configure script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it. _ACEOF exit fi ## ------------------------ ## ## Autoconf initialization. ## ## ------------------------ ## cat >config.log <<_ACEOF This file contains any messages produced by compilers while running configure, to aid debugging if configure makes a mistake. It was created by onedrive $as_me v2.3.13, which was generated by GNU Autoconf 2.69. Invocation command line was $ $0 $@ _ACEOF exec 5>>config.log { cat <<_ASUNAME ## --------- ## ## Platform. 
## ## --------- ## hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` uname -m = `(uname -m) 2>/dev/null || echo unknown` uname -r = `(uname -r) 2>/dev/null || echo unknown` uname -s = `(uname -s) 2>/dev/null || echo unknown` uname -v = `(uname -v) 2>/dev/null || echo unknown` /usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` /bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` /bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` /usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` /usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` /usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` /bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` /usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` /bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` _ASUNAME as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. $as_echo "PATH: $as_dir" done IFS=$as_save_IFS } >&5 cat >&5 <<_ACEOF ## ----------- ## ## Core tests. ## ## ----------- ## _ACEOF # Keep a trace of the command line. # Strip out --no-create and --no-recursion so they do not pile up. # Strip out --silent because we don't want to record it for future runs. # Also quote any args containing shell meta-characters. # Make two passes to allow for proper duplicate-argument suppression. ac_configure_args= ac_configure_args0= ac_configure_args1= ac_must_keep_next=false for ac_pass in 1 2 do for ac_arg do case $ac_arg in -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil) continue ;; *\'*) ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; esac case $ac_pass in 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; 2) as_fn_append ac_configure_args1 " '$ac_arg'" if test $ac_must_keep_next = true; then ac_must_keep_next=false # Got value, back to normal. else case $ac_arg in *=* | --config-cache | -C | -disable-* | --disable-* \ | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ | -with-* | --with-* | -without-* | --without-* | --x) case "$ac_configure_args0 " in "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; esac ;; -* ) ac_must_keep_next=true ;; esac fi as_fn_append ac_configure_args " '$ac_arg'" ;; esac done done { ac_configure_args0=; unset ac_configure_args0;} { ac_configure_args1=; unset ac_configure_args1;} # When interrupted or exit'd, cleanup temporary files, and complete # config.log. We remove comments because anyway the quotes in there # would cause problems or look ugly. # WARNING: Use '\'' to represent an apostrophe within the trap. # WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. trap 'exit_status=$? # Save into config.log some information that might help in debugging. { echo $as_echo "## ---------------- ## ## Cache variables. 
## ## ---------------- ##" echo # The following way of writing the cache mishandles newlines in values, ( for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do eval ac_val=\$$ac_var case $ac_val in #( *${as_nl}*) case $ac_var in #( *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 $as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; esac case $ac_var in #( _ | IFS | as_nl) ;; #( BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( *) { eval $ac_var=; unset $ac_var;} ;; esac ;; esac done (set) 2>&1 | case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( *${as_nl}ac_space=\ *) sed -n \ "s/'\''/'\''\\\\'\'''\''/g; s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" ;; #( *) sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" ;; esac | sort ) echo $as_echo "## ----------------- ## ## Output variables. ## ## ----------------- ##" echo for ac_var in $ac_subst_vars do eval ac_val=\$$ac_var case $ac_val in *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; esac $as_echo "$ac_var='\''$ac_val'\''" done | sort echo if test -n "$ac_subst_files"; then $as_echo "## ------------------- ## ## File substitutions. ## ## ------------------- ##" echo for ac_var in $ac_subst_files do eval ac_val=\$$ac_var case $ac_val in *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; esac $as_echo "$ac_var='\''$ac_val'\''" done | sort echo fi if test -s confdefs.h; then $as_echo "## ----------- ## ## confdefs.h. ## ## ----------- ##" echo cat confdefs.h echo fi test "$ac_signal" != 0 && $as_echo "$as_me: caught signal $ac_signal" $as_echo "$as_me: exit $exit_status" } >&5 rm -f core *.core core.conftest.* && rm -f -r conftest* confdefs* conf$$* $ac_clean_files && exit $exit_status ' 0 for ac_signal in 1 2 13 15; do trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal done ac_signal=0 # confdefs.h avoids OS command line length limits that DEFS can exceed. rm -f -r conftest* confdefs.h $as_echo "/* confdefs.h */" > confdefs.h # Predefined preprocessor variables. cat >>confdefs.h <<_ACEOF #define PACKAGE_NAME "$PACKAGE_NAME" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_TARNAME "$PACKAGE_TARNAME" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_VERSION "$PACKAGE_VERSION" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_STRING "$PACKAGE_STRING" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_URL "$PACKAGE_URL" _ACEOF # Let the site file select an alternate cache file if it wants to. # Prefer an explicitly selected file to automatically selected ones. ac_site_file1=NONE ac_site_file2=NONE if test -n "$CONFIG_SITE"; then # We do not want a PATH search for config.site. case $CONFIG_SITE in #(( -*) ac_site_file1=./$CONFIG_SITE;; */*) ac_site_file1=$CONFIG_SITE;; *) ac_site_file1=./$CONFIG_SITE;; esac elif test "x$prefix" != xNONE; then ac_site_file1=$prefix/share/config.site ac_site_file2=$prefix/etc/config.site else ac_site_file1=$ac_default_prefix/share/config.site ac_site_file2=$ac_default_prefix/etc/config.site fi for ac_site_file in "$ac_site_file1" "$ac_site_file2" do test "x$ac_site_file" = xNONE && continue if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 $as_echo "$as_me: loading site script $ac_site_file" >&6;} sed 's/^/| /' "$ac_site_file" >&5 . 
"$ac_site_file" \ || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "failed to load site script $ac_site_file See \`config.log' for more details" "$LINENO" 5; } fi done if test -r "$cache_file"; then # Some versions of bash will fail to source /dev/null (special files # actually), so we avoid doing that. DJGPP emulates it as a regular file. if test /dev/null != "$cache_file" && test -f "$cache_file"; then { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 $as_echo "$as_me: loading cache $cache_file" >&6;} case $cache_file in [\\/]* | ?:[\\/]* ) . "$cache_file";; *) . "./$cache_file";; esac fi else { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 $as_echo "$as_me: creating cache $cache_file" >&6;} >$cache_file fi # Check that the precious variables saved in the cache have kept the same # value. ac_cache_corrupted=false for ac_var in $ac_precious_vars; do eval ac_old_set=\$ac_cv_env_${ac_var}_set eval ac_new_set=\$ac_env_${ac_var}_set eval ac_old_val=\$ac_cv_env_${ac_var}_value eval ac_new_val=\$ac_env_${ac_var}_value case $ac_old_set,$ac_new_set in set,) { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 $as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} ac_cache_corrupted=: ;; ,set) { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 $as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} ac_cache_corrupted=: ;; ,);; *) if test "x$ac_old_val" != "x$ac_new_val"; then # differences in whitespace do not lead to failure. ac_old_val_w=`echo x $ac_old_val` ac_new_val_w=`echo x $ac_new_val` if test "$ac_old_val_w" != "$ac_new_val_w"; then { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 $as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} ac_cache_corrupted=: else { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 $as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} eval $ac_var=\$ac_old_val fi { $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 $as_echo "$as_me: former value: \`$ac_old_val'" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 $as_echo "$as_me: current value: \`$ac_new_val'" >&2;} fi;; esac # Pass precious variables to config.status. if test "$ac_new_set" = set; then case $ac_new_val in *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; *) ac_arg=$ac_var=$ac_new_val ;; esac case " $ac_configure_args " in *" '$ac_arg' "*) ;; # Avoid dups. Use of quotes ensures accuracy. *) as_fn_append ac_configure_args " '$ac_arg'" ;; esac fi done if $ac_cache_corrupted; then { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 $as_echo "$as_me: error: changes in the environment can compromise the build" >&2;} as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5 fi ## -------------------- ## ## Main body of script. 
## ## -------------------- ## ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu ac_aux_dir= for ac_dir in "$srcdir" "$srcdir/.." "$srcdir/../.."; do if test -f "$ac_dir/install-sh"; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/install-sh -c" break elif test -f "$ac_dir/install.sh"; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/install.sh -c" break elif test -f "$ac_dir/shtool"; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/shtool install -c" break fi done if test -z "$ac_aux_dir"; then as_fn_error $? "cannot find install-sh, install.sh, or shtool in \"$srcdir\" \"$srcdir/..\" \"$srcdir/../..\"" "$LINENO" 5 fi # These three variables are undocumented and unsupported, # and are intended to be withdrawn in a future Autoconf release. # They can cause serious problems if a builder's source tree is in a directory # whose full name contains unusual characters. ac_config_guess="$SHELL $ac_aux_dir/config.guess" # Please don't use this var. ac_config_sub="$SHELL $ac_aux_dir/config.sub" # Please don't use this var. ac_configure="$SHELL $ac_aux_dir/configure" # Please don't use this var. # Find a good install program. We prefer a C program (faster), # so one script is as good as another. But avoid the broken or # incompatible versions: # SysV /etc/install, /usr/sbin/install # SunOS /usr/etc/install # IRIX /sbin/install # AIX /bin/install # AmigaOS /C/install, which installs bootblocks on floppy discs # AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag # AFS /usr/afsws/bin/install, which mishandles nonexistent args # SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" # OS/2's system install, which has a completely different semantic # ./install, which can be erroneously created by make from ./install.sh. # Reject install programs that cannot install multiple files. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5 $as_echo_n "checking for a BSD-compatible install... " >&6; } if test -z "$INSTALL"; then if ${ac_cv_path_install+:} false; then : $as_echo_n "(cached) " >&6 else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. # Account for people who put trailing slashes in PATH elements. case $as_dir/ in #(( ./ | .// | /[cC]/* | \ /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \ ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \ /usr/ucb/* ) ;; *) # OSF1 and SCO ODT 3.0 have their own names for install. # Don't use installbsd from OSF since it installs stuff as root # by default. for ac_prog in ginstall scoinst install; do for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext"; then if test $ac_prog = install && grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # AIX install. It has an incompatible calling convention. : elif test $ac_prog = install && grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # program-specific install script used by HP pwplus--don't use. 
: else rm -rf conftest.one conftest.two conftest.dir echo one > conftest.one echo two > conftest.two mkdir conftest.dir if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" && test -s conftest.one && test -s conftest.two && test -s conftest.dir/conftest.one && test -s conftest.dir/conftest.two then ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" break 3 fi fi fi done done ;; esac done IFS=$as_save_IFS rm -rf conftest.one conftest.two conftest.dir fi if test "${ac_cv_path_install+set}" = set; then INSTALL=$ac_cv_path_install else # As a last resort, use the slow shell script. Don't cache a # value for INSTALL within a source directory, because that will # break other packages using the cache if that directory is # removed, or if the value is a relative name. INSTALL=$ac_install_sh fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5 $as_echo "$INSTALL" >&6; } # Use test -z because SunOS4 sh mishandles braces in ${var-val}. # It thinks the first close brace ends the variable substitution. test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}' test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}' test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644' if test "x$ac_cv_env_PKG_CONFIG_set" != "xset"; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}pkg-config", so it can be a program name with args. set dummy ${ac_tool_prefix}pkg-config; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_PKG_CONFIG+:} false; then : $as_echo_n "(cached) " >&6 else case $PKG_CONFIG in [\\/]* | ?:[\\/]*) ac_cv_path_PKG_CONFIG="$PKG_CONFIG" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi PKG_CONFIG=$ac_cv_path_PKG_CONFIG if test -n "$PKG_CONFIG"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PKG_CONFIG" >&5 $as_echo "$PKG_CONFIG" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_path_PKG_CONFIG"; then ac_pt_PKG_CONFIG=$PKG_CONFIG # Extract the first word of "pkg-config", so it can be a program name with args. set dummy pkg-config; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_ac_pt_PKG_CONFIG+:} false; then : $as_echo_n "(cached) " >&6 else case $ac_pt_PKG_CONFIG in [\\/]* | ?:[\\/]*) ac_cv_path_ac_pt_PKG_CONFIG="$ac_pt_PKG_CONFIG" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_ac_pt_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi ac_pt_PKG_CONFIG=$ac_cv_path_ac_pt_PKG_CONFIG if test -n "$ac_pt_PKG_CONFIG"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_pt_PKG_CONFIG" >&5 $as_echo "$ac_pt_PKG_CONFIG" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_pt_PKG_CONFIG" = x; then PKG_CONFIG="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac PKG_CONFIG=$ac_pt_PKG_CONFIG fi else PKG_CONFIG="$ac_cv_path_PKG_CONFIG" fi fi if test -n "$PKG_CONFIG"; then _pkg_min_version=0.9.0 { $as_echo "$as_me:${as_lineno-$LINENO}: checking pkg-config is at least version $_pkg_min_version" >&5 $as_echo_n "checking pkg-config is at least version $_pkg_min_version... " >&6; } if $PKG_CONFIG --atleast-pkgconfig-version $_pkg_min_version; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } PKG_CONFIG="" fi fi for ac_prog in dmd ldmd2 ldc2 do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_DC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$DC"; then ac_cv_prog_DC="$DC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_DC="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi DC=$ac_cv_prog_DC if test -n "$DC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DC" >&5 $as_echo "$DC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$DC" && break done test -n "$DC" || DC="NOT_FOUND" DC_TYPE= case $(basename $DC) in dmd) DC_TYPE=dmd ;; ldmd2) DC_TYPE=dmd ;; ldc2) DC_TYPE=ldc ;; NOT_FOUND) as_fn_error 1 "Could not find any compatible D compiler" "$LINENO" 5 esac vercomp () { IFS=. read -r a0 a1 a2 aa <' $bb then return 1 else return 0 fi fi fi fi } DO_VERSION_CHECK=1 # Check whether --enable-version-check was given. if test "${enable_version_check+set}" = set; then : enableval=$enable_version_check; fi if test "x$enable_version_check" = "xno"; then : DO_VERSION_CHECK=0 fi if test "$DO_VERSION_CHECK" = "1"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: checking version of D compiler" >&5 $as_echo_n "checking version of D compiler... " >&6; } # check for valid versions case $(basename $DC) in ldmd2|ldc2) # LDC - the LLVM D compiler (1.12.0): ... VERSION=`$DC --version` # remove everything up to first ( VERSION=${VERSION#* (} # remove everthing after ): VERSION=${VERSION%%):*} # now version should be something like L.M.N MINVERSION=1.12.0 ;; dmd) # DMD64 D Compiler v2.085.1\n... 
VERSION=`$DC --version | tr '\n' ' '` VERSION=${VERSION#*Compiler v} VERSION=${VERSION%% *} # now version should be something like L.M.N MINVERSION=2.083.1 ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $VERSION" >&5 $as_echo "$VERSION" >&6; } vercomp $MINVERSION $VERSION if test $? = 1 then as_fn_error 1 "Compiler version insufficient, current compiler version $VERSION, minimum version $MINVERSION" "$LINENO" 5 fi #echo "MINVERSION=$MINVERSION VERSION=$VERSION" fi PACKAGE_DATE="December 2019" pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for curl" >&5 $as_echo_n "checking for curl... " >&6; } if test -n "$curl_CFLAGS"; then pkg_cv_curl_CFLAGS="$curl_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"libcurl\""; } >&5 ($PKG_CONFIG --exists --print-errors "libcurl") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_curl_CFLAGS=`$PKG_CONFIG --cflags "libcurl" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$curl_LIBS"; then pkg_cv_curl_LIBS="$curl_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"libcurl\""; } >&5 ($PKG_CONFIG --exists --print-errors "libcurl") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_curl_LIBS=`$PKG_CONFIG --libs "libcurl" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then curl_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "libcurl" 2>&1` else curl_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "libcurl" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$curl_PKG_ERRORS" >&5 as_fn_error $? "Package requirements (libcurl) were not met: $curl_PKG_ERRORS Consider adjusting the PKG_CONFIG_PATH environment variable if you installed software in a non-standard prefix. Alternatively, you may set the environment variables curl_CFLAGS and curl_LIBS to avoid the need to call pkg-config. See the pkg-config man page for more details." "$LINENO" 5 elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "The pkg-config script could not be found or is too old. Make sure it is in your PATH or set the PKG_CONFIG environment variable to the full path to pkg-config. Alternatively, you may set the environment variables curl_CFLAGS and curl_LIBS to avoid the need to call pkg-config. See the pkg-config man page for more details. To get pkg-config, see . See \`config.log' for more details" "$LINENO" 5; } else curl_CFLAGS=$pkg_cv_curl_CFLAGS curl_LIBS=$pkg_cv_curl_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } fi pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for sqlite" >&5 $as_echo_n "checking for sqlite... 
" >&6; } if test -n "$sqlite_CFLAGS"; then pkg_cv_sqlite_CFLAGS="$sqlite_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"sqlite3\""; } >&5 ($PKG_CONFIG --exists --print-errors "sqlite3") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_sqlite_CFLAGS=`$PKG_CONFIG --cflags "sqlite3" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$sqlite_LIBS"; then pkg_cv_sqlite_LIBS="$sqlite_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"sqlite3\""; } >&5 ($PKG_CONFIG --exists --print-errors "sqlite3") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_sqlite_LIBS=`$PKG_CONFIG --libs "sqlite3" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then sqlite_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "sqlite3" 2>&1` else sqlite_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "sqlite3" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$sqlite_PKG_ERRORS" >&5 as_fn_error $? "Package requirements (sqlite3) were not met: $sqlite_PKG_ERRORS Consider adjusting the PKG_CONFIG_PATH environment variable if you installed software in a non-standard prefix. Alternatively, you may set the environment variables sqlite_CFLAGS and sqlite_LIBS to avoid the need to call pkg-config. See the pkg-config man page for more details." "$LINENO" 5 elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "The pkg-config script could not be found or is too old. Make sure it is in your PATH or set the PKG_CONFIG environment variable to the full path to pkg-config. Alternatively, you may set the environment variables sqlite_CFLAGS and sqlite_LIBS to avoid the need to call pkg-config. See the pkg-config man page for more details. To get pkg-config, see . See \`config.log' for more details" "$LINENO" 5; } else sqlite_CFLAGS=$pkg_cv_sqlite_CFLAGS sqlite_LIBS=$pkg_cv_sqlite_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } fi # Check whether --with-systemdsystemunitdir was given. if test "${with_systemdsystemunitdir+set}" = set; then : withval=$with_systemdsystemunitdir; else with_systemdsystemunitdir=auto fi if test "x$with_systemdsystemunitdir" = "xyes" -o "x$with_systemdsystemunitdir" = "xauto"; then : def_systemdsystemunitdir=$($PKG_CONFIG --variable=systemdsystemunitdir systemd) if test "x$def_systemdsystemunitdir" = "x"; then : if test "x$with_systemdsystemunitdir" = "xyes"; then : as_fn_error $? 
"systemd support requested but pkg-config unable to query systemd package" "$LINENO" 5 fi with_systemdsystemunitdir=no else with_systemdsystemunitdir="$def_systemdsystemunitdir" fi fi if test "x$with_systemdsystemunitdir" != "xno"; then : systemdsystemunitdir=$with_systemdsystemunitdir fi # Check whether --with-systemduserunitdir was given. if test "${with_systemduserunitdir+set}" = set; then : withval=$with_systemduserunitdir; else with_systemduserunitdir=auto fi if test "x$with_systemduserunitdir" = "xyes" -o "x$with_systemduserunitdir" = "xauto"; then : def_systemduserunitdir=$($PKG_CONFIG --variable=systemduserunitdir systemd) if test "x$def_systemduserunitdir" = "x"; then : if test "x$with_systemduserunitdir" = "xyes"; then : as_fn_error $? "systemd support requested but pkg-config unable to query systemd package" "$LINENO" 5 fi with_systemduserunitdir=no else with_systemduserunitdir="$def_systemduserunitdir" fi fi if test "x$with_systemduserunitdir" != "xno"; then : systemduserunitdir=$with_systemduserunitdir fi if test "x$with_systemduserunitdir" != "xno" -a "x$with_systemdsystemunitdir" != "xno"; then : havesystemd=yes else havesystemd=no fi HAVE_SYSTEMD=$havesystemd # Check whether --enable-notifications was given. if test "${enable_notifications+set}" = set; then : enableval=$enable_notifications; fi if test "x$enable_notifications" = "xyes"; then : enable_notifications=yes else enable_notifications=no fi if test "x$enable_notifications" = "xyes"; then : pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for notify" >&5 $as_echo_n "checking for notify... " >&6; } if test -n "$notify_CFLAGS"; then pkg_cv_notify_CFLAGS="$notify_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"libnotify\""; } >&5 ($PKG_CONFIG --exists --print-errors "libnotify") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_notify_CFLAGS=`$PKG_CONFIG --cflags "libnotify" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$notify_LIBS"; then pkg_cv_notify_LIBS="$notify_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"libnotify\""; } >&5 ($PKG_CONFIG --exists --print-errors "libnotify") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_notify_LIBS=`$PKG_CONFIG --libs "libnotify" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then notify_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "libnotify" 2>&1` else notify_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "libnotify" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$notify_PKG_ERRORS" >&5 enable_notifications=no elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } enable_notifications=no else notify_CFLAGS=$pkg_cv_notify_CFLAGS notify_LIBS=$pkg_cv_notify_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } fi else notify_LIBS="" fi NOTIFICATIONS=$enable_notifications # Check whether --enable-completions was given. if test "${enable_completions+set}" = set; then : enableval=$enable_completions; fi if test "x$enable_completions" = "xyes"; then : enable_completions=yes else enable_completions=no fi COMPLETIONS=$enable_completions if test "x$enable_completions" = "xyes"; then : # Check whether --with-bash-completion-dir was given. if test "${with_bash_completion_dir+set}" = set; then : withval=$with_bash_completion_dir; else with_bash_completion_dir=auto fi if test "x$with_bash_completion_dir" = "xyes" -o "x$with_bash_completion_dir" = "xauto"; then : if test -n "$bashcompdir"; then pkg_cv_bashcompdir="$bashcompdir" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"bash-completion\""; } >&5 ($PKG_CONFIG --exists --print-errors "bash-completion") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_bashcompdir=`$PKG_CONFIG --variable="completionsdir" "bash-completion" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi bashcompdir=$pkg_cv_bashcompdir if test "x$bashcompdir" = x""; then : bashcompdir="${sysconfdir}/bash_completion.d" fi with_bash_completion_dir=$bashcompdir fi BASH_COMPLETION_DIR=$with_bash_completion_dir # Check whether --with-zsh-completion-dir was given. if test "${with_zsh_completion_dir+set}" = set; then : withval=$with_zsh_completion_dir; else with_zsh_completion_dir=auto fi if test "x$with_zsh_completion_dir" = "xyes" -o "x$with_zsh_completion_dir" = "xauto"; then : with_zsh_completion_dir="/usr/local/share/zsh/site-functions" fi ZSH_COMPLETION_DIR=$with_zsh_completion_dir fi # Check whether --enable-debug was given. if test "${enable_debug+set}" = set; then : enableval=$enable_debug; fi if test "x$enable_debug" = "xyes"; then : DEBUG=yes else DEBUG=no fi ac_config_files="$ac_config_files Makefile contrib/pacman/PKGBUILD contrib/spec/onedrive.spec onedrive.1 contrib/systemd/onedrive.service contrib/systemd/onedrive@.service" cat >confcache <<\_ACEOF # This file is a shell script that caches the results of configure # tests run on this system so they can be shared between configure # scripts and configure runs, see configure's option --config-cache. # It is not useful on other systems. If it contains results you don't # want to keep, you may remove or edit it. 
# # config.status only pays attention to the cache file if you give it # the --recheck option to rerun configure. # # `ac_cv_env_foo' variables (set or unset) will be overridden when # loading this file, other *unset* `ac_cv_foo' will be assigned the # following values. _ACEOF # The following way of writing the cache mishandles newlines in values, # but we know of no workaround that is simple, portable, and efficient. # So, we kill variables containing newlines. # Ultrix sh set writes to stderr and can't be redirected directly, # and sets the high bit in the cache file unless we assign to the vars. ( for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do eval ac_val=\$$ac_var case $ac_val in #( *${as_nl}*) case $ac_var in #( *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 $as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; esac case $ac_var in #( _ | IFS | as_nl) ;; #( BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( *) { eval $ac_var=; unset $ac_var;} ;; esac ;; esac done (set) 2>&1 | case $as_nl`(ac_space=' '; set) 2>&1` in #( *${as_nl}ac_space=\ *) # `set' does not quote correctly, so add quotes: double-quote # substitution turns \\\\ into \\, and sed turns \\ into \. sed -n \ "s/'/'\\\\''/g; s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" ;; #( *) # `set' quotes correctly as required by POSIX, so do not add quotes. sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" ;; esac | sort ) | sed ' /^ac_cv_env_/b end t clear :clear s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/ t end s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ :end' >>confcache if diff "$cache_file" confcache >/dev/null 2>&1; then :; else if test -w "$cache_file"; then if test "x$cache_file" != "x/dev/null"; then { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 $as_echo "$as_me: updating cache $cache_file" >&6;} if test ! -f "$cache_file" || test -h "$cache_file"; then cat confcache >"$cache_file" else case $cache_file in #( */* | ?:*) mv -f confcache "$cache_file"$$ && mv -f "$cache_file"$$ "$cache_file" ;; #( *) mv -f confcache "$cache_file" ;; esac fi fi else { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 $as_echo "$as_me: not updating unwritable cache $cache_file" >&6;} fi fi rm -f confcache test "x$prefix" = xNONE && prefix=$ac_default_prefix # Let make expand exec_prefix. test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' # Transform confdefs.h into DEFS. # Protect against shell expansion while executing Makefile rules. # Protect against Makefile macro expansion. # # If the first sed substitution is executed (which looks for macros that # take arguments), then branch to the quote section. Otherwise, # look for a macro that doesn't take arguments. ac_script=' :mline /\\$/{ N s,\\\n,, b mline } t clear :clear s/^[ ]*#[ ]*define[ ][ ]*\([^ (][^ (]*([^)]*)\)[ ]*\(.*\)/-D\1=\2/g t quote s/^[ ]*#[ ]*define[ ][ ]*\([^ ][^ ]*\)[ ]*\(.*\)/-D\1=\2/g t quote b any :quote s/[ `~#$^&*(){}\\|;'\''"<>?]/\\&/g s/\[/\\&/g s/\]/\\&/g s/\$/$$/g H :any ${ g s/^\n// s/\n/ /g p } ' DEFS=`sed -n "$ac_script" confdefs.h` ac_libobjs= ac_ltlibobjs= U= for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue # 1. Remove the extension, and $U if already installed. ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' ac_i=`$as_echo "$ac_i" | sed "$ac_script"` # 2. Prepend LIBOBJDIR. 
When used with automake>=1.10 LIBOBJDIR # will be set to the directory where LIBOBJS objects are built. as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' done LIBOBJS=$ac_libobjs LTLIBOBJS=$ac_ltlibobjs : "${CONFIG_STATUS=./config.status}" ac_write_fail=0 ac_clean_files_save=$ac_clean_files ac_clean_files="$ac_clean_files $CONFIG_STATUS" { $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 $as_echo "$as_me: creating $CONFIG_STATUS" >&6;} as_write_fail=0 cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 #! $SHELL # Generated by $as_me. # Run this file to recreate the current configuration. # Compiler output produced by configure, useful for debugging # configure, is in config.log if it exists. debug=false ac_cs_recheck=false ac_cs_silent=false SHELL=\${CONFIG_SHELL-$SHELL} export SHELL _ASEOF cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 ## -------------------- ## ## M4sh Initialization. ## ## -------------------- ## # Be more Bourne compatible DUALCASE=1; export DUALCASE # for MKS sh if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' setopt NO_GLOB_SUBST else case `(set -o) 2>/dev/null` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi as_nl=' ' export as_nl # Printing a long string crashes Solaris 7 /usr/bin/printf. as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo # Prefer a ksh shell builtin over an external printf program on Solaris, # but without wasting forks for bash or zsh. if test -z "$BASH_VERSION$ZSH_VERSION" \ && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='print -r --' as_echo_n='print -rn --' elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='printf %s\n' as_echo_n='printf %s' else if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' as_echo_n='/usr/ucb/echo -n' else as_echo_body='eval expr "X$1" : "X\\(.*\\)"' as_echo_n_body='eval arg=$1; case $arg in #( *"$as_nl"*) expr "X$arg" : "X\\(.*\\)$as_nl"; arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; esac; expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" ' export as_echo_n_body as_echo_n='sh -c $as_echo_n_body as_echo' fi export as_echo_body as_echo='sh -c $as_echo_body as_echo' fi # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || PATH_SEPARATOR=';' } fi # IFS # We need space, tab and new line, in precisely that order. Quoting is # there to prevent editors from complaining about space-tab. # (If _AS_PATH_WALK were called with IFS unset, it would disable word # splitting by setting IFS to empty value.) IFS=" "" $as_nl" # Find who we are. Look in the path if we contain no directory separator. as_myself= case $0 in #(( *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break done IFS=$as_save_IFS ;; esac # We did not find ourselves, most probably we were run as `sh COMMAND' # in which case we are not to be found in the path. if test "x$as_myself" = x; then as_myself=$0 fi if test ! -f "$as_myself"; then $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 exit 1 fi # Unset variables that we do not need and which cause bugs (e.g. in # pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" # suppresses any "Segmentation fault" message there. '((' could # trigger a bug in pdksh 5.2.14. for as_var in BASH_ENV ENV MAIL MAILPATH do eval test x\${$as_var+set} = xset \ && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : done PS1='$ ' PS2='> ' PS4='+ ' # NLS nuisances. LC_ALL=C export LC_ALL LANGUAGE=C export LANGUAGE # CDPATH. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH # as_fn_error STATUS ERROR [LINENO LOG_FD] # ---------------------------------------- # Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are # provided, also output the error to LOG_FD, referencing LINENO. Then exit the # script with STATUS, using 1 if that was 0. as_fn_error () { as_status=$1; test $as_status -eq 0 && as_status=1 if test "$4"; then as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 fi $as_echo "$as_me: error: $2" >&2 as_fn_exit $as_status } # as_fn_error # as_fn_set_status STATUS # ----------------------- # Set $? to STATUS, without forking. as_fn_set_status () { return $1 } # as_fn_set_status # as_fn_exit STATUS # ----------------- # Exit the shell with STATUS, even in a "trap 0" or "set -e" context. as_fn_exit () { set +e as_fn_set_status $1 exit $1 } # as_fn_exit # as_fn_unset VAR # --------------- # Portably unset VAR. as_fn_unset () { { eval $1=; unset $1;} } as_unset=as_fn_unset # as_fn_append VAR VALUE # ---------------------- # Append the text in VALUE to the end of the definition contained in VAR. Take # advantage of any shell optimizations that allow amortized linear growth over # repeated appends, instead of the typical quadratic growth present in naive # implementations. if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : eval 'as_fn_append () { eval $1+=\$2 }' else as_fn_append () { eval $1=\$$1\$2 } fi # as_fn_append # as_fn_arith ARG... # ------------------ # Perform arithmetic evaluation on the ARGs, and store the result in the # global $as_val. Take advantage of shells that can avoid forks. The arguments # must be portable across $(()) and expr. if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : eval 'as_fn_arith () { as_val=$(( $* )) }' else as_fn_arith () { as_val=`expr "$@" || test $? -eq 1` } fi # as_fn_arith if expr a : '\(a\)' >/dev/null 2>&1 && test "X`expr 00001 : '.*\(...\)'`" = X001; then as_expr=expr else as_expr=false fi if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then as_basename=basename else as_basename=false fi if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then as_dirname=dirname else as_dirname=false fi as_me=`$as_basename -- "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)' \| . 2>/dev/null || $as_echo X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/ q } /^X\/\(\/\/\)$/{ s//\1/ q } /^X\/\(\/\).*/{ s//\1/ q } s/.*/./; q'` # Avoid depending upon Character Ranges. 
as_cr_letters='abcdefghijklmnopqrstuvwxyz' as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' as_cr_Letters=$as_cr_letters$as_cr_LETTERS as_cr_digits='0123456789' as_cr_alnum=$as_cr_Letters$as_cr_digits ECHO_C= ECHO_N= ECHO_T= case `echo -n x` in #((((( -n*) case `echo 'xy\c'` in *c*) ECHO_T=' ';; # ECHO_T is single tab character. xy) ECHO_C='\c';; *) echo `echo ksh88 bug on AIX 6.1` > /dev/null ECHO_T=' ';; esac;; *) ECHO_N='-n';; esac rm -f conf$$ conf$$.exe conf$$.file if test -d conf$$.dir; then rm -f conf$$.dir/conf$$.file else rm -f conf$$.dir mkdir conf$$.dir 2>/dev/null fi if (echo >conf$$.file) 2>/dev/null; then if ln -s conf$$.file conf$$ 2>/dev/null; then as_ln_s='ln -s' # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. # In both cases, we have to default to `cp -pR'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || as_ln_s='cp -pR' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else as_ln_s='cp -pR' fi else as_ln_s='cp -pR' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null # as_fn_mkdir_p # ------------- # Create "$as_dir" as a directory, including parents if necessary. as_fn_mkdir_p () { case $as_dir in #( -*) as_dir=./$as_dir;; esac test -d "$as_dir" || eval $as_mkdir_p || { as_dirs= while :; do case $as_dir in #( *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( *) as_qdir=$as_dir;; esac as_dirs="'$as_qdir' $as_dirs" as_dir=`$as_dirname -- "$as_dir" || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` test -d "$as_dir" && break done test -z "$as_dirs" || eval "mkdir $as_dirs" } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" } # as_fn_mkdir_p if mkdir -p . 2>/dev/null; then as_mkdir_p='mkdir -p "$as_dir"' else test -d ./-p && rmdir ./-p as_mkdir_p=false fi # as_fn_executable_p FILE # ----------------------- # Test if FILE is an executable regular file. as_fn_executable_p () { test -f "$1" && test -x "$1" } # as_fn_executable_p as_test_x='test -x' as_executable_p=as_fn_executable_p # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" # Sed expression to map a string onto a valid variable name. as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" exec 6>&1 ## ----------------------------------- ## ## Main body of $CONFIG_STATUS script. ## ## ----------------------------------- ## _ASEOF test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # Save the log message, to keep $0 and so on meaningful, and to # report actual input values of CONFIG_FILES etc. instead of their # values after options handling. ac_log=" This file was extended by onedrive $as_me v2.3.13, which was generated by GNU Autoconf 2.69. 
Invocation command line was CONFIG_FILES = $CONFIG_FILES CONFIG_HEADERS = $CONFIG_HEADERS CONFIG_LINKS = $CONFIG_LINKS CONFIG_COMMANDS = $CONFIG_COMMANDS $ $0 $@ on `(hostname || uname -n) 2>/dev/null | sed 1q` " _ACEOF case $ac_config_files in *" "*) set x $ac_config_files; shift; ac_config_files=$*;; esac cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 # Files that config.status was made for. config_files="$ac_config_files" _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 ac_cs_usage="\ \`$as_me' instantiates files and other configuration actions from templates according to the current configuration. Unless the files and actions are specified as TAGs, all are instantiated by default. Usage: $0 [OPTION]... [TAG]... -h, --help print this help, then exit -V, --version print version number and configuration settings, then exit --config print configuration, then exit -q, --quiet, --silent do not print progress messages -d, --debug don't remove temporary files --recheck update $as_me by reconfiguring in the same conditions --file=FILE[:TEMPLATE] instantiate the configuration file FILE Configuration files: $config_files Report bugs to ." _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" ac_cs_version="\\ onedrive config.status v2.3.13 configured by $0, generated by GNU Autoconf 2.69, with options \\"\$ac_cs_config\\" Copyright (C) 2012 Free Software Foundation, Inc. This config.status script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it." ac_pwd='$ac_pwd' srcdir='$srcdir' INSTALL='$INSTALL' test -n "\$AWK" || AWK=awk _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # The default lists apply if the user does not specify any file. ac_need_defaults=: while test $# != 0 do case $1 in --*=?*) ac_option=`expr "X$1" : 'X\([^=]*\)='` ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` ac_shift=: ;; --*=) ac_option=`expr "X$1" : 'X\([^=]*\)='` ac_optarg= ac_shift=: ;; *) ac_option=$1 ac_optarg=$2 ac_shift=shift ;; esac case $ac_option in # Handling of the options. -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) ac_cs_recheck=: ;; --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) $as_echo "$ac_cs_version"; exit ;; --config | --confi | --conf | --con | --co | --c ) $as_echo "$ac_cs_config"; exit ;; --debug | --debu | --deb | --de | --d | -d ) debug=: ;; --file | --fil | --fi | --f ) $ac_shift case $ac_optarg in *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; '') as_fn_error $? "missing file argument" ;; esac as_fn_append CONFIG_FILES " '$ac_optarg'" ac_need_defaults=false;; --he | --h | --help | --hel | -h ) $as_echo "$ac_cs_usage"; exit ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil | --si | --s) ac_cs_silent=: ;; # This is an error. -*) as_fn_error $? "unrecognized option: \`$1' Try \`$0 --help' for more information." 
;; *) as_fn_append ac_config_targets " $1" ac_need_defaults=false ;; esac shift done ac_configure_extra_args= if $ac_cs_silent; then exec 6>/dev/null ac_configure_extra_args="$ac_configure_extra_args --silent" fi _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 if \$ac_cs_recheck; then set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion shift \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 CONFIG_SHELL='$SHELL' export CONFIG_SHELL exec "\$@" fi _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 exec 5>>config.log { echo sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX ## Running $as_me. ## _ASBOX $as_echo "$ac_log" } >&5 _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # Handling of arguments. for ac_config_target in $ac_config_targets do case $ac_config_target in "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;; "contrib/pacman/PKGBUILD") CONFIG_FILES="$CONFIG_FILES contrib/pacman/PKGBUILD" ;; "contrib/spec/onedrive.spec") CONFIG_FILES="$CONFIG_FILES contrib/spec/onedrive.spec" ;; "onedrive.1") CONFIG_FILES="$CONFIG_FILES onedrive.1" ;; "contrib/systemd/onedrive.service") CONFIG_FILES="$CONFIG_FILES contrib/systemd/onedrive.service" ;; "contrib/systemd/onedrive@.service") CONFIG_FILES="$CONFIG_FILES contrib/systemd/onedrive@.service" ;; *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; esac done # If the user did not use the arguments to specify the items to instantiate, # then the envvar interface is used. Set only those that are not. # We use the long form for the default assignment because of an extremely # bizarre bug on SunOS 4.1.3. if $ac_need_defaults; then test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files fi # Have a temporary directory for convenience. Make it in the build tree # simply because there is no reason against having it here, and in addition, # creating and moving files from /tmp can sometimes cause problems. # Hook for its removal unless debugging. # Note that there is a small window in which the directory will not be cleaned: # after its creation but before its name has been assigned to `$tmp'. $debug || { tmp= ac_tmp= trap 'exit_status=$? : "${ac_tmp:=$tmp}" { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status ' 0 trap 'as_fn_exit 1' 1 2 13 15 } # Create a (secure) tmp directory for tmp files. { tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && test -d "$tmp" } || { tmp=./conf$$-$RANDOM (umask 077 && mkdir "$tmp") } || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 ac_tmp=$tmp # Set up the scripts for CONFIG_FILES section. # No need to generate them if there are no CONFIG_FILES. # This happens for instance with `./config.status config.h'. if test -n "$CONFIG_FILES"; then ac_cr=`echo X | tr X '\015'` # On cygwin, bash can eat \r inside `` if the user requested igncr. # But we know of no other shell where ac_cr would be empty at this # point, so we can use a bashism as a fallback. if test "x$ac_cr" = x; then eval ac_cr=\$\'\\r\' fi ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then ac_cs_awk_cr='\\r' else ac_cs_awk_cr=$ac_cr fi echo 'BEGIN {' >"$ac_tmp/subs1.awk" && _ACEOF { echo "cat >conf$$subs.awk <<_ACEOF" && echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && echo "_ACEOF" } >conf$$subs.sh || as_fn_error $? 
"could not make $CONFIG_STATUS" "$LINENO" 5 ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'` ac_delim='%!_!# ' for ac_last_try in false false false false false :; do . ./conf$$subs.sh || as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` if test $ac_delim_n = $ac_delim_num; then break elif $ac_last_try; then as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 else ac_delim="$ac_delim!$ac_delim _$ac_delim!! " fi done rm -f conf$$subs.sh cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK && _ACEOF sed -n ' h s/^/S["/; s/!.*/"]=/ p g s/^[^!]*!// :repl t repl s/'"$ac_delim"'$// t delim :nl h s/\(.\{148\}\)..*/\1/ t more1 s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ p n b repl :more1 s/["\\]/\\&/g; s/^/"/; s/$/"\\/ p g s/.\{148\}// t nl :delim h s/\(.\{148\}\)..*/\1/ t more2 s/["\\]/\\&/g; s/^/"/; s/$/"/ p b :more2 s/["\\]/\\&/g; s/^/"/; s/$/"\\/ p g s/.\{148\}// t delim ' >$CONFIG_STATUS || ac_write_fail=1 rm -f conf$$subs.awk cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 _ACAWK cat >>"\$ac_tmp/subs1.awk" <<_ACAWK && for (key in S) S_is_set[key] = 1 FS = "" } { line = $ 0 nfields = split(line, field, "@") substed = 0 len = length(field[1]) for (i = 2; i < nfields; i++) { key = field[i] keylen = length(key) if (S_is_set[key]) { value = S[key] line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) len += length(value) + length(field[++i]) substed = 1 } else len += 1 + keylen } print line } _ACAWK _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" else cat fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \ || as_fn_error $? "could not setup config files machinery" "$LINENO" 5 _ACEOF # VPATH may cause trouble with some makes, so we remove sole $(srcdir), # ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and # trailing colons and then remove the whole line if VPATH becomes empty # (actually we leave an empty line to preserve line numbers). if test "x$srcdir" = x.; then ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{ h s/// s/^/:/ s/[ ]*$/:/ s/:\$(srcdir):/:/g s/:\${srcdir}:/:/g s/:@srcdir@:/:/g s/^:*// s/:*$// x s/\(=[ ]*\).*/\1/ G s/\n// s/^[^=]*=[ ]*$// }' fi cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 fi # test -n "$CONFIG_FILES" eval set X " :F $CONFIG_FILES " shift for ac_tag do case $ac_tag in :[FHLC]) ac_mode=$ac_tag; continue;; esac case $ac_mode$ac_tag in :[FHL]*:*);; :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;; :[FH]-) ac_tag=-:-;; :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; esac ac_save_IFS=$IFS IFS=: set x $ac_tag IFS=$ac_save_IFS shift ac_file=$1 shift case $ac_mode in :L) ac_source=$1;; :[FH]) ac_file_inputs= for ac_f do case $ac_f in -) ac_f="$ac_tmp/stdin";; *) # Look for the file first in the build tree, then in the source tree # (if the path is not absolute). The absolute path cannot be DOS-style, # because $ac_f cannot contain `:'. test -f "$ac_f" || case $ac_f in [\\/$]*) false;; *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; esac || as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;; esac case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac as_fn_append ac_file_inputs " '$ac_f'" done # Let's still pretend it is `configure' which instantiates (i.e., don't # use $as_me), people would be surprised to read: # /* config.h. Generated by config.status. 
*/ configure_input='Generated from '` $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' `' by configure.' if test x"$ac_file" != x-; then configure_input="$ac_file. $configure_input" { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 $as_echo "$as_me: creating $ac_file" >&6;} fi # Neutralize special characters interpreted by sed in replacement strings. case $configure_input in #( *\&* | *\|* | *\\* ) ac_sed_conf_input=`$as_echo "$configure_input" | sed 's/[\\\\&|]/\\\\&/g'`;; #( *) ac_sed_conf_input=$configure_input;; esac case $ac_tag in *:-:* | *:-) cat >"$ac_tmp/stdin" \ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; esac ;; esac ac_dir=`$as_dirname -- "$ac_file" || $as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$ac_file" : 'X\(//\)[^/]' \| \ X"$ac_file" : 'X\(//\)$' \| \ X"$ac_file" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$ac_file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` as_dir="$ac_dir"; as_fn_mkdir_p ac_builddir=. case "$ac_dir" in .) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` # A ".." for each directory in $ac_dir_suffix. ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` case $ac_top_builddir_sub in "") ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; esac ;; esac ac_abs_top_builddir=$ac_pwd ac_abs_builddir=$ac_pwd$ac_dir_suffix # for backward compatibility: ac_top_builddir=$ac_top_build_prefix case $srcdir in .) # We are building in place. ac_srcdir=. ac_top_srcdir=$ac_top_builddir_sub ac_abs_top_srcdir=$ac_pwd ;; [\\/]* | ?:[\\/]* ) # Absolute name. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ac_abs_top_srcdir=$srcdir ;; *) # Relative name. ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_build_prefix$srcdir ac_abs_top_srcdir=$ac_pwd/$srcdir ;; esac ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix case $ac_mode in :F) # # CONFIG_FILE # case $INSTALL in [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;; *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;; esac _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # If the template does not know about datarootdir, expand it. # FIXME: This hack should be removed a few years after 2.60. ac_datarootdir_hack=; ac_datarootdir_seen= ac_sed_dataroot=' /datarootdir/ { p q } /@datadir@/p /@docdir@/p /@infodir@/p /@localedir@/p /@mandir@/p' case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in *datarootdir*) ac_datarootdir_seen=yes;; *@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 $as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_datarootdir_hack=' s&@datadir@&$datadir&g s&@docdir@&$docdir&g s&@infodir@&$infodir&g s&@localedir@&$localedir&g s&@mandir@&$mandir&g s&\\\${datarootdir}&$datarootdir&g' ;; esac _ACEOF # Neutralize VPATH when `$srcdir' = `.'. # Shell code in configure.ac might set extrasub. # FIXME: do we really want to maintain this feature? 
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_sed_extra="$ac_vpsub $extrasub _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 :t /@[a-zA-Z_][a-zA-Z_0-9]*@/!b s|@configure_input@|$ac_sed_conf_input|;t t s&@top_builddir@&$ac_top_builddir_sub&;t t s&@top_build_prefix@&$ac_top_build_prefix&;t t s&@srcdir@&$ac_srcdir&;t t s&@abs_srcdir@&$ac_abs_srcdir&;t t s&@top_srcdir@&$ac_top_srcdir&;t t s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t s&@builddir@&$ac_builddir&;t t s&@abs_builddir@&$ac_abs_builddir&;t t s&@abs_top_builddir@&$ac_abs_top_builddir&;t t s&@INSTALL@&$ac_INSTALL&;t t $ac_datarootdir_hack " eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \ >$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5 test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } && { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \ "$ac_tmp/out"`; test -z "$ac_out"; } && { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' which seems to be undefined. Please make sure it is defined" >&5 $as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' which seems to be undefined. Please make sure it is defined" >&2;} rm -f "$ac_tmp/stdin" case $ac_file in -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";; *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";; esac \ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; esac done # for ac_tag as_fn_exit 0 _ACEOF ac_clean_files=$ac_clean_files_save test $ac_write_fail = 0 || as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5 # configure is writing to config.log, and then calls config.status. # config.status does its own redirection, appending to config.log. # Unfortunately, on DOS this fails, as config.log is still kept open # by configure, so config.status won't be able to write to it; its # output is simply discarded. So we exec the FD to /dev/null, # effectively closing config.log, so it can be properly (re)opened and # appended to by config.status. When coming back to configure, we # need to make the FD available again. if test "$no_create" != yes; then ac_cs_success=: ac_config_status_args= test "$silent" = yes && ac_config_status_args="$ac_config_status_args --quiet" exec 5>/dev/null $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false exec 5>>config.log # Use ||, not &&, to avoid exiting from the if with $? = 1, which # would make configure fail if this is the last instruction. 
$ac_cs_success || as_fn_exit 1 fi if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} fi onedrive-2.3.13/configure.ac000066400000000000000000000226531360252424000157070ustar00rootroot00000000000000dnl configure.ac for OneDrive Linux Client dnl Copyright 2019 Norbert Preining dnl Licensed GPL v3 or later dnl How to make a release dnl - increase the version number in the AC_INIT call below dnl - run autoconf which generates configure dnl - commit the changed files (configure.ac, configure) dnl - tag the release AC_PREREQ([2.69]) AC_INIT([onedrive],[v2.3.13], [https://github.com/abraunegg/onedrive], [onedrive]) AC_CONFIG_SRCDIR([src/main.d]) AC_ARG_VAR([DC], [D compiler executable]) AC_ARG_VAR([DCFLAGS], [flags for D compiler]) dnl necessary programs: install, pkg-config AC_PROG_INSTALL PKG_PROG_PKG_CONFIG dnl Determine D compiler dnl we check for dmd, dmd2, and ldc2 in this order dnl furthermore, we set DC_TYPE to either dmd or ldc and export this into the dnl Makefile so that we can adjust command line arguments AC_CHECK_PROGS([DC], [dmd ldmd2 ldc2], NOT_FOUND) DC_TYPE= case $(basename $DC) in dmd) DC_TYPE=dmd ;; ldmd2) DC_TYPE=dmd ;; ldc2) DC_TYPE=ldc ;; NOT_FOUND) AC_MSG_ERROR(Could not find any compatible D compiler, 1) esac dnl dash/POSIX version of version comparison vercomp () { IFS=. read -r a0 a1 a2 aa <' $bb then return 1 else return 0 fi fi fi fi } DO_VERSION_CHECK=1 AC_ARG_ENABLE(version-check, AS_HELP_STRING([--disable-version-check], [Disable checks of compiler version during configure time])) AS_IF([test "x$enable_version_check" = "xno"], DO_VERSION_CHECK=0,) AS_IF([test "$DO_VERSION_CHECK" = "1"], [ dnl do the version check AC_MSG_CHECKING([version of D compiler]) # check for valid versions case $(basename $DC) in ldmd2|ldc2) # LDC - the LLVM D compiler (1.12.0): ... VERSION=`$DC --version` # remove everything up to first ( VERSION=${VERSION#* (} # remove everthing after ): VERSION=${VERSION%%):*} # now version should be something like L.M.N MINVERSION=1.12.0 ;; dmd) # DMD64 D Compiler v2.085.1\n... VERSION=`$DC --version | tr '\n' ' '` VERSION=${VERSION#*Compiler v} VERSION=${VERSION%% *} # now version should be something like L.M.N MINVERSION=2.083.1 ;; esac AC_MSG_RESULT([$VERSION]) vercomp $MINVERSION $VERSION if test $? = 1 then AC_MSG_ERROR([Compiler version insufficient, current compiler version $VERSION, minimum version $MINVERSION], 1) fi #echo "MINVERSION=$MINVERSION VERSION=$VERSION" ]) AC_SUBST([DC_TYPE]) dnl In case the environment variable DCFLAGS is set, we export it to the dnl generated Makefile at configure run: AC_SUBST([DCFLAGS]) dnl The package date is only used in the man page onedrive.1.in dnl we generate onedrive.1 from it during configure run, but we want dnl to have the same date, namely the one when the configure script dnl was generated from the configure.ac (i.e., on release time). 
dnl this uses a call to the underlying m4 engine to call an external cmd PACKAGE_DATE="m4_esyscmd([date "+%B %Y" | tr -d '\n'])" AC_SUBST([PACKAGE_DATE]) dnl Check for required modules: curl and sqlite at the moment PKG_CHECK_MODULES([curl],[libcurl]) PKG_CHECK_MODULES([sqlite],[sqlite3]) dnl dnl systemd and unit file directories dnl This is a bit tricky, because we want to allow for dnl --with-systemdsystemunitdir=auto dnl as well as =/path/to/dir dnl The first step is that we check whether the --with options is passed to configure run dnl if yes, we don't do anything (the ,, at the end of the next line), and if not, we dnl set with_systemdsystemunitdir=auto, meaning we will try pkg-config to find the correct dnl value. AC_ARG_WITH([systemdsystemunitdir], [AS_HELP_STRING([--with-systemdsystemunitdir=DIR], [Directory for systemd system service files])],, [with_systemdsystemunitdir=auto]) dnl If no value is passed in (or auto/yes is passed in), then we try to find the correct dnl value via pkg-config and put it into $def_systemdsystemunitdir AS_IF([test "x$with_systemdsystemunitdir" = "xyes" -o "x$with_systemdsystemunitdir" = "xauto"], [ dnl true part, so try to determine with pkg-config def_systemdsystemunitdir=$($PKG_CONFIG --variable=systemdsystemunitdir systemd) dnl if we cannot find it via pkg-config, *and* the user explicitely passed it in with, dnl we warn, and in all cases we unset (set to no) the respective variable AS_IF([test "x$def_systemdsystemunitdir" = "x"], [ dnl we couldn't find the default value via pkg-config AS_IF([test "x$with_systemdsystemunitdir" = "xyes"], [AC_MSG_ERROR([systemd support requested but pkg-config unable to query systemd package])]) with_systemdsystemunitdir=no ], [ dnl pkg-config found the value, use it with_systemdsystemunitdir="$def_systemdsystemunitdir" ] ) ] ) dnl finally, if we found a value, put it into the generated Makefile AS_IF([test "x$with_systemdsystemunitdir" != "xno"], [AC_SUBST([systemdsystemunitdir], [$with_systemdsystemunitdir])]) dnl Now do the same as above for systemduserunitdir! 
AC_ARG_WITH([systemduserunitdir], [AS_HELP_STRING([--with-systemduserunitdir=DIR], [Directory for systemd user service files])],, [with_systemduserunitdir=auto]) AS_IF([test "x$with_systemduserunitdir" = "xyes" -o "x$with_systemduserunitdir" = "xauto"], [ def_systemduserunitdir=$($PKG_CONFIG --variable=systemduserunitdir systemd) AS_IF([test "x$def_systemduserunitdir" = "x"], [ AS_IF([test "x$with_systemduserunitdir" = "xyes"], [AC_MSG_ERROR([systemd support requested but pkg-config unable to query systemd package])]) with_systemduserunitdir=no ], [ with_systemduserunitdir="$def_systemduserunitdir" ] ) ] ) AS_IF([test "x$with_systemduserunitdir" != "xno"], [AC_SUBST([systemduserunitdir], [$with_systemduserunitdir])]) dnl We enable systemd integration only if we have found both user/system unit dirs AS_IF([test "x$with_systemduserunitdir" != "xno" -a "x$with_systemdsystemunitdir" != "xno"], [havesystemd=yes], [havesystemd=no]) AC_SUBST([HAVE_SYSTEMD], $havesystemd) dnl dnl Notification support dnl only check for libnotify if --enable-notifications is given AC_ARG_ENABLE(notifications, AS_HELP_STRING([--enable-notifications], [Enable desktop notifications via libnotify])) AS_IF([test "x$enable_notifications" = "xyes"], [enable_notifications=yes], [enable_notifications=no]) dnl if --enable-notifications was given, check for libnotify, and disable if not found dnl otherwise substitute the notifu AS_IF([test "x$enable_notifications" = "xyes"], [PKG_CHECK_MODULES(notify,libnotify,,enable_notifications=no)], [AC_SUBST([notify_LIBS],"")]) AC_SUBST([NOTIFICATIONS],$enable_notifications) dnl dnl Completion support dnl First determine whether completions are requested, pass that to Makefile AC_ARG_ENABLE([completions], AS_HELP_STRING([--enable-completions], [Install shell completions for bash and zsh])) AS_IF([test "x$enable_completions" = "xyes"], [enable_completions=yes], [enable_completions=no]) AC_SUBST([COMPLETIONS],$enable_completions) dnl if completions are enabled, search for the bash/zsh completion directory in the dnl similar way as we did for the systemd directories AS_IF([test "x$enable_completions" = "xyes"],[ AC_ARG_WITH([bash-completion-dir], [AS_HELP_STRING([--with-bash-completion-dir=DIR], [Directory for bash completion files])], , [with_bash_completion_dir=auto]) AS_IF([test "x$with_bash_completion_dir" = "xyes" -o "x$with_bash_completion_dir" = "xauto"], [ PKG_CHECK_VAR(bashcompdir, [bash-completion], [completionsdir], , bashcompdir="${sysconfdir}/bash_completion.d") with_bash_completion_dir=$bashcompdir ]) AC_SUBST([BASH_COMPLETION_DIR], $with_bash_completion_dir) AC_ARG_WITH([zsh-completion-dir], [AS_HELP_STRING([--with-zsh-completion-dir=DIR], [Directory for zsh completion files])],, [with_zsh_completion_dir=auto]) AS_IF([test "x$with_zsh_completion_dir" = "xyes" -o "x$with_zsh_completion_dir" = "xauto"], [ with_zsh_completion_dir="/usr/local/share/zsh/site-functions" ]) AC_SUBST([ZSH_COMPLETION_DIR], $with_zsh_completion_dir) ]) dnl dnl Debug support AC_ARG_ENABLE(debug, AS_HELP_STRING([--enable-debug], [Pass debug option to the compiler])) AS_IF([test "x$enable_debug" = "xyes"], AC_SUBST([DEBUG],yes), AC_SUBST([DEBUG],no)) dnl generate necessary files AC_CONFIG_FILES([ Makefile contrib/pacman/PKGBUILD contrib/spec/onedrive.spec onedrive.1 contrib/systemd/onedrive.service contrib/systemd/onedrive@.service ]) AC_OUTPUT 
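dnl Example configure invocations (illustrative sketch only; all of the options
dnl handled above are optional and default to auto-detection where applicable):
dnl   ./configure
dnl   ./configure --enable-notifications --enable-completions
dnl   ./configure --with-systemdsystemunitdir=no --with-systemduserunitdir=no
dnl   DC=ldmd2 ./configure --enable-debug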
onedrive-2.3.13/contrib/000077500000000000000000000000001360252424000150515ustar00rootroot00000000000000onedrive-2.3.13/contrib/completions/000077500000000000000000000000001360252424000174055ustar00rootroot00000000000000onedrive-2.3.13/contrib/completions/complete.bash000066400000000000000000000025771360252424000220670ustar00rootroot00000000000000#!/bin/bash # # BASH completion code for OneDrive Linux Client # (c) 2019 Norbert Preining # License: GPLv3+ (as with the rest of the OneDrive Linux client project) _onedrive() { local cur prev COMPREPLY=() cur=${COMP_WORDS[COMP_CWORD]} prev=${COMP_WORDS[COMP_CWORD-1]} options='--check-for-nomount --check-for-nosync --debug-https --disable-notifications --display-config --display-sync-status --download-only --disable-upload-validation --dry-run --enable-logging --force-http-1.1 --local-first --logout -m --monitor --no-remote-delete --print-token --resync --skip-dot-files --skip-symlinks --synchronize --upload-only -v --verbose --version -h --help' argopts='--create-directory --get-O365-drive-id --remove-directory --single-directory --source-directory' # Loop on the arguments to manage conflicting options for (( i=0; i < ${#COMP_WORDS[@]}-1; i++ )); do #exclude some mutually exclusive options [[ ${COMP_WORDS[i]} == '--synchronize' ]] && options=${options/--monitor} [[ ${COMP_WORDS[i]} == '--monitor' ]] && options=${options/--synchronize} done case "$prev" in --confdir|--syncdir) _filedir return 0 ;; --create-directory|--get-O365-drive-id|--remove-directory|--single-directory|--source-directory) return 0 ;; *) COMPREPLY=( $( compgen -W "$options $argopts" -- $cur)) return 0 ;; esac # notreached return 0 } complete -F _onedrive onedrive onedrive-2.3.13/contrib/completions/complete.zsh000066400000000000000000000055461360252424000217550ustar00rootroot00000000000000#compdef onedrive # # ZSH completion code for OneDrive Linux Client # (c) 2019 Norbert Preining # License: GPLv3+ (as with the rest of the OneDrive Linux client project) local -a all_opts all_opts=( '--check-for-nomount[Check for the presence of .nosync in the syncdir root. If found, do not perform sync.]' '--check-for-nosync[Check for the presence of .nosync in each directory. 
If found, skip directory from sync.]' '--confdir[Set the directory used to store the configuration files]:config directory:_files -/' '--create-directory[Create a directory on OneDrive - no sync will be performed.]:directory name:' '--debug-https[Debug OneDrive HTTPS communication.]' '--destination-directory[Destination directory for renamed or move on OneDrive - no sync will be performed.]:directory name:' '--disable-notifications[Do not use desktop notifications in monitor mode.]' '--display-config[Display what options the client will use as currently configured - no sync will be performed.]' '--display-sync-status[Display the sync status of the client - no sync will be performed.]' '--download-only[Only download remote changes]' '--disable-upload-validation[Disable upload validation when uploading to OneDrive]' '--dry-run[Perform a trial sync with no changes made]' '--enable-logging[Enable client activity to a separate log file]' '--force-http-1.1[Force the use of HTTP 1.1 for all operations]' '--get-O365-drive-id[Query and return the Office 365 Drive ID for a given Office 365 SharePoint Shared Library]:' '--local-first[Synchronize from the local directory source first, before downloading changes from OneDrive.]' '--logout[Logout the current user]' '(-m --monitor)'{-m,--monitor}'[Keep monitoring for local and remote changes]' '--no-remote-delete[Do not delete local file deletes from OneDrive when using --upload-only]' '--print-token[Print the access token, useful for debugging]' '--resync[Forget the last saved state, perform a full sync]' '--remove-directory[Remove a directory on OneDrive - no sync will be performed.]:directory name:' '--single-directory[Specify a single local directory within the OneDrive root to sync.]:source directory:_files -/' '--skip-dot-files[Skip dot files and folders from syncing]' '--skip-symlinks[Skip syncing of symlinks]' '--source-directory[Source directory to rename or move on OneDrive - no sync will be performed.]:source directory:' '--syncdir[Specify the local directory used for synchronization to OneDrive]:sync directory:_files -/' '--synchronize[Perform a synchronization]' '--upload-only[Only upload to OneDrive, do not sync changes from OneDrive locally]' '(-v --verbose)'{-v,--verbose}'[Print more details, useful for debugging (repeat for extra debugging)]' '--version[Print the version and exit]' '(-h --help)'{-h,--help}'[Print help information]' ) _arguments -S "$all_opts[@]" && return 0 onedrive-2.3.13/contrib/docker/000077500000000000000000000000001360252424000163205ustar00rootroot00000000000000onedrive-2.3.13/contrib/docker/Dockerfile000066400000000000000000000021041360252424000203070ustar00rootroot00000000000000FROM centos:7 ENV GOSU_VERSION=1.11 RUN yum install -y make git gcc libcurl-devel sqlite-devel pkg-config && \ rm -rf /var/cache/yum/ && \ curl -fsS -o install.sh https://dlang.org/install.sh && \ bash install.sh dmd && \ # gosu installation gpg --keyserver ha.pool.sks-keyservers.net --recv-keys B42F6819007F00F88E364FD4036A9C25BF357DD4 \ && curl -o /usr/local/bin/gosu -SL "https://github.com/tianon/gosu/releases/download/${GOSU_VERSION}/gosu-amd64" \ && curl -o /usr/local/bin/gosu.asc -SL "https://github.com/tianon/gosu/releases/download/${GOSU_VERSION}/gosu-amd64.asc" \ && gpg --verify /usr/local/bin/gosu.asc \ && rm /usr/local/bin/gosu.asc \ && rm -r /root/.gnupg/ \ && chmod +x /usr/local/bin/gosu \ && gosu nobody true RUN mkdir -p /onedrive/conf /onedrive/data COPY . /usr/src/onedrive RUN . 
"$(bash install.sh -a)" && \ cd /usr/src/onedrive/ && \ ./configure && \ make clean && \ make && \ make install COPY contrib/docker/entrypoint.sh /entrypoint.sh VOLUME ["/onedrive/conf"] ENTRYPOINT ["/entrypoint.sh"] onedrive-2.3.13/contrib/docker/Dockerfile-alpine000066400000000000000000000017201360252424000215600ustar00rootroot00000000000000# -*-Dockerfile-*- FROM golang:alpine RUN apk add alpine-sdk bash llvm5 gnupg xz jq curl-dev sqlite-dev binutils-gold ARG LDC_VERSION=1.13.0 ENV LDC_VERSION=${LDC_VERSION} RUN curl -fsSL "https://github.com/ldc-developers/ldc/releases/download/v${LDC_VERSION}/ldc2-${LDC_VERSION}-alpine-linux-x86_64.tar.xz" |\ tar xJf - -C / && \ mv "/ldc2-${LDC_VERSION}-alpine-linux-x86_64" "/ldc" ENV PATH="/ldc/bin:${PATH}" \ LD_LIBRARY_PATH="/ldc/lib:/usr/lib:/lib:${LD_LIBRARY_PATH}" \ LIBRARY_PATH="/ldc/lib:/usr/lib:/lib:${LD_LIBRARY_PATH}" RUN go get github.com/tianon/gosu COPY . /usr/src/onedrive RUN cd /usr/src/onedrive/ && \ DC=ldmd2 ./configure && \ make clean && \ make && \ make install FROM alpine ENTRYPOINT ["/entrypoint.sh"] RUN apk add --no-cache bash libcurl libgcc shadow sqlite-libs && \ mkdir -p /onedrive/conf /onedrive/data COPY contrib/docker/entrypoint.sh / COPY --from=0 /go/bin/gosu /usr/local/bin/onedrive /usr/local/bin/ onedrive-2.3.13/contrib/docker/Dockerfile-stretch000066400000000000000000000012561360252424000217700ustar00rootroot00000000000000# -*-Dockerfile-*- FROM debian:stretch RUN apt update && \ apt install -y build-essential curl libcurl4-openssl-dev libsqlite3-dev pkg-config git RUN curl -fsS -o install.sh https://dlang.org/install.sh && \ bash install.sh dmd COPY . /usr/src/onedrive RUN . "$(bash install.sh -a)" && \ cd /usr/src/onedrive/ && \ ./configure && \ make clean && \ make && \ make install FROM debian:stretch-slim ENTRYPOINT ["/entrypoint.sh"] RUN apt update && \ apt install -y gosu libcurl3 libsqlite3-0 && \ rm -rf /var/*/apt && \ mkdir -p /onedrive/conf /onedrive/data COPY contrib/docker/entrypoint.sh / COPY --from=0 /usr/local/bin/onedrive /usr/local/bin/ onedrive-2.3.13/contrib/docker/entrypoint.sh000077500000000000000000000026761360252424000211050ustar00rootroot00000000000000#!/bin/bash -eu set +H -xeuo pipefail : ${ONEDRIVE_UID:=$(stat /onedrive/data -c '%u')} : ${ONEDRIVE_GID:=$(stat /onedrive/data -c '%g')} # Create new group using target GID if ! odgroup="$(getent group $ONEDRIVE_GID)"; then odgroup='onedrive' groupadd "${odgroup}" -g $ONEDRIVE_GID else odgroup=${odgroup%%:*} fi # Create new user using target UID if ! 
oduser="$(getent passwd $ONEDRIVE_UID)"; then oduser='onedrive' useradd -m "${oduser}" -u $ONEDRIVE_UID -g $ONEDRIVE_GID else oduser="${oduser%%:*}" usermod -g "${odgroup}" "${oduser}" grep -qv root <( groups "${oduser}" ) || { echo 'ROOT level privileges prohibited!'; exit 1; } fi chown "${oduser}:${odgroup}" /onedrive/ /onedrive/conf # Default parameters ARGS=(--monitor --confdir /onedrive/conf --syncdir /onedrive/data) # Make Verbose output optional, based on an environment variable if [ "${ONEDRIVE_VERBOSE:=0}" == "1" ]; then echo "# We are being verbose" ARGS=(--verbose ${ARGS[@]}) fi # Tell client to perform debug output, based on an environment variable if [ "${ONEDRIVE_DEBUG:=0}" == "1" ]; then echo "# We are performing debug output" ARGS=(--verbose --verbose ${ARGS[@]}) fi # Tell client to perform a resync based on environment variable if [ "${ONEDRIVE_RESYNC:=0}" == "1" ]; then echo "# We are performing a --resync" ARGS=(--resync ${ARGS[@]}) fi if [ ${#} -gt 0 ]; then ARGS=("${@}") fi exec gosu "${oduser}" /usr/local/bin/onedrive "${ARGS[@]}" onedrive-2.3.13/contrib/docker/hooks/000077500000000000000000000000001360252424000174435ustar00rootroot00000000000000onedrive-2.3.13/contrib/docker/hooks/post_push000077500000000000000000000002171360252424000214150ustar00rootroot00000000000000#!/bin/bash BUILD_DATE=`date "+%Y%m%d%H%M"` docker tag ${IMAGE_NAME} "${IMAGE_NAME}-${BUILD_DATE}" docker push "${IMAGE_NAME}-${BUILD_DATE}" onedrive-2.3.13/contrib/gentoo/000077500000000000000000000000001360252424000163445ustar00rootroot00000000000000onedrive-2.3.13/contrib/gentoo/onedrive-2.3.13.ebuild000066400000000000000000000013551360252424000220730ustar00rootroot00000000000000# Copyright 1999-2018 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 EAPI=6 DESCRIPTION="Onedrive sync client for Linux" HOMEPAGE="https://github.com/abraunegg/onedrive" SRC_URI="https://github.com/abraunegg/onedrive/archive/v${PV}.tar.gz -> ${P}.tar.gz" LICENSE="GPL-3" SLOT="0" KEYWORDS="~amd64 ~x86" IUSE="" DEPEND=" >=dev-lang/dmd-2.081.1 dev-db/sqlite " RDEPEND="${DEPEND} net-misc/curl " src_prepare() { default # Copy line 38 to 44 as systemd path needs to be created in portage sandbox # Update the makefile so that it doesnt use git commands to get the version during build. sed -i -e "38h; 44p; 44x" \ -e "s/version:.*/version:/" \ -e "\$s/.*/\techo v${PV} > version/" \ Makefile } onedrive-2.3.13/contrib/init.d/000077500000000000000000000000001360252424000162365ustar00rootroot00000000000000onedrive-2.3.13/contrib/init.d/onedrive.init000066400000000000000000000026041360252424000207400ustar00rootroot00000000000000#!/bin/sh # # chkconfig: 2345 20 80 # description: Starts and stops OneDrive Free Client # # Source function library. if [ -f /etc/init.d/functions ] ; then . /etc/init.d/functions elif [ -f /etc/rc.d/init.d/functions ] ; then . /etc/rc.d/init.d/functions else exit 1 fi # Source networking configuration. . /etc/sysconfig/network # Check that networking is up. [ ${NETWORKING} = "no" ] && exit 1 APP_NAME="OneDrive Free Client" STOP_TIMEOUT=${STOP_TIMEOUT-5} RETVAL=0 start() { export PATH=/usr/local/bin/:$PATH echo -n "Starting $APP_NAME: " daemon --user root onedrive_service.sh RETVAL=$? echo [ $RETVAL -eq 0 ] && touch /var/lock/subsys/onedrive || \ RETVAL=1 return $RETVAL } stop() { echo -n "Shutting down $APP_NAME: " killproc onedrive RETVAL=$? echo [ $RETVAL = 0 ] && rm -f /var/lock/subsys/onedrive ${pidfile} } restart() { stop start } rhstatus() { status onedrive return $? 
} # Allow status as non-root. if [ "$1" = status ]; then rhstatus exit $? fi case "$1" in start) start ;; stop) stop ;; restart) restart ;; reload) reload ;; status) rhstatus ;; *) echo "Usage: $0 {start|stop|restart|reload|status}" exit 2 esac exit $? onedrive-2.3.13/contrib/init.d/onedrive_service.sh000066400000000000000000000002741360252424000221300ustar00rootroot00000000000000#!/bin/bash # This script is to assist in starting the onedrive client when using init.d APP_OPTIONS="--monitor --verbose --enable-logging" onedrive $APP_OPTIONS > /dev/null 2>&1 & exit 0 onedrive-2.3.13/contrib/logrotate/000077500000000000000000000000001360252424000170515ustar00rootroot00000000000000onedrive-2.3.13/contrib/logrotate/onedrive.logrotate000066400000000000000000000010721360252424000226060ustar00rootroot00000000000000# Any OneDrive Client logs configured for here /var/log/onedrive/*log { # What user / group should logrotate use? # Logrotate 3.8.9 or greater required otherwise: # "unknown option 'su' -- ignoring line" is generated su root users # rotate log files weekly weekly # keep 4 weeks worth of backlogs rotate 4 # create new (empty) log files after rotating old ones create # use date as a suffix of the rotated file dateext # compress the log files compress # missing files OK missingok } onedrive-2.3.13/contrib/pacman/000077500000000000000000000000001360252424000163105ustar00rootroot00000000000000onedrive-2.3.13/contrib/pacman/PKGBUILD.in000066400000000000000000000020101360252424000200320ustar00rootroot00000000000000pkgname=onedrive pkgver=@PACKAGE_VERSION@ pkgrel=1 #patch-level (Increment this when patch is applied) pkgdesc="A free OneDrive Client for Linux. This is a fork of the https://github.com/skilion/onedrive repository" license=("unknown") url="https://github.com/abraunegg/onedrive/" arch=("i686" "x86_64") depends=("curl" "gcc-libs" "glibc" "sqlite") makedepends=("dmd" "git" "tar") prepare() { cd "$srcdir" wget "https://github.com/abraunegg/onedrive/archive/v$pkgver.tar.gz" -O "$pkgname-$pkgver-patch-$pkgrel.tar.gz" #Pull last commit release tar -xzf "$pkgname-$pkgver-patch-$pkgrel.tar.gz" --one-top-level="$pkgname-$pkgver-patch-$pkgrel" --strip-components 1 } build() { cd "$pkgname-$pkgver-patch-$pkgrel" git init #Create .git folder required from Makefile git add * #Create .git/index git commit --allow-empty-message -m "" #Create .git/refs/heads/master git tag v$pkgver #Add version tag make PREFIX=/usr onedrive } package() { cd "$pkgname-$pkgver-patch-$pkgrel" make PREFIX=/usr DESTDIR="$pkgdir" install } onedrive-2.3.13/contrib/spec/000077500000000000000000000000001360252424000160035ustar00rootroot00000000000000onedrive-2.3.13/contrib/spec/onedrive.spec.in000066400000000000000000000057541360252424000211120ustar00rootroot00000000000000# Determine based on distribution & version what options & packages to include %if 0%{?fedora} || 0%{?rhel} >= 7 %global with_systemd 1 %else %global with_systemd 0 %endif Name: onedrive Version: @PACKAGE_VERSION@ Release: 1%{?dist} Summary: Microsoft OneDrive Client Group: System Environment/Network License: GPLv3 URL: https://github.com/abraunegg/onedrive #Source0: %{name}-%{version}.tar.gz BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n) BuildRequires: git BuildRequires: dmd >= 2.085.0 BuildRequires: sqlite-devel >= 3.7.15 BuildRequires: libcurl-devel Requires: sqlite >= 3.7.15 Requires: libcurl %if 0%{?with_systemd} Requires(post): systemd Requires(preun): systemd Requires(postun): systemd %else Requires(post): chkconfig 
Requires(preun): chkconfig Requires(preun): initscripts Requires(postun): initscripts %endif %define debug_package %{nil} %description Microsoft OneDrive Client for Linux %prep %setup -c -D -T # This creates cd %{_builddir}/%{name}-%{version}/ # clone the repository git clone https://github.com/abraunegg/onedrive.git . # We should now have %{_builddir}/%{name}-%{version} with the git clone %build cd %{_builddir}/%{name}-%{version} make %install # Make the destination directories %{__mkdir_p} %{buildroot}/etc/ %{__mkdir_p} %{buildroot}/usr/bin/ %{__mkdir_p} %{buildroot}/etc/logrotate.d cp %{_builddir}/%{name}-%{version}/onedrive %{buildroot}/usr/bin/onedrive cp %{_builddir}/%{name}-%{version}/logrotate/onedrive.logrotate %{buildroot}/etc/logrotate.d/onedrive %if 0%{?with_systemd} %{__mkdir_p} %{buildroot}/%{_unitdir} cp %{_builddir}/%{name}-%{version}/onedrive.service %{buildroot}/%{_unitdir}/onedrive.service cp %{_builddir}/%{name}-%{version}/onedrive.service %{buildroot}/%{_unitdir}/onedrive@.service %else %{__mkdir_p} %{buildroot}%{_initrddir} cp %{_builddir}/%{name}-%{version}/init.d/onedrive_service.sh %{buildroot}/usr/bin/onedrive_service.sh cp %{_builddir}/%{name}-%{version}/init.d/onedrive.init %{buildroot}%{_initrddir}/onedrive %endif %clean %files %defattr(0444,root,root,0755) %attr(0555,root,root) /usr/bin/onedrive %attr(0644,root,root) /etc/logrotate.d/onedrive %if 0%{?with_systemd} %attr(0555,root,root) %{_unitdir}/onedrive.service %attr(0555,root,root) %{_unitdir}/onedrive@.service %else %attr(0555,root,root) /usr/bin/onedrive_service.sh %attr(0555,root,root) %{_initrddir}/onedrive %endif %pre rm -f /root/.config/onedrive/items.db rm -f /root/.config/onedrive/items.sqlite3 rm -f /root/.config/onedrive/resume_upload %post mkdir -p /root/.config/onedrive mkdir -p /root/OneDrive mkdir -p /var/log/onedrive chown root.users /var/log/onedrive chmod 0775 /var/log/onedrive %if 0%{?with_systemd} %systemd_post onedrive.service %else chkconfig --add onedrive chkconfig onedrive off %endif %preun %if 0%{?with_systemd} %systemd_preun onedrive.service %else if [ $1 -eq 0 ] ; then service onedrive stop &> /dev/null chkconfig --del onedrive &> /dev/null fi %endif %changelog onedrive-2.3.13/contrib/systemd/000077500000000000000000000000001360252424000165415ustar00rootroot00000000000000onedrive-2.3.13/contrib/systemd/onedrive.service.in000066400000000000000000000004471360252424000223500ustar00rootroot00000000000000[Unit] Description=OneDrive Free Client Documentation=https://github.com/abraunegg/onedrive After=network-online.target Wants=network-online.target [Service] ExecStart=@prefix@/bin/onedrive --monitor Restart=on-failure RestartSec=3 RestartPreventExitStatus=3 [Install] WantedBy=default.target onedrive-2.3.13/contrib/systemd/onedrive@.service.in000066400000000000000000000005511360252424000224440ustar00rootroot00000000000000[Unit] Description=OneDrive Free Client for %i Documentation=https://github.com/abraunegg/onedrive After=network-online.target Wants=network-online.target [Service] ExecStart=@prefix@/bin/onedrive --monitor --confdir=/home/%i/.config/onedrive User=%i Group=users Restart=on-failure RestartSec=3 RestartPreventExitStatus=3 [Install] WantedBy=multi-user.target onedrive-2.3.13/docs/000077500000000000000000000000001360252424000143415ustar00rootroot00000000000000onedrive-2.3.13/docs/Docker.md000066400000000000000000000140121360252424000160700ustar00rootroot00000000000000# onedrive docker image Thats right folks onedrive is now dockerized ;) This container offers simple 
monitoring-mode service for 'Free Client for OneDrive on Linux'. ## Basic Setup ### 0. Install docker under your own platform's instructions ### 1. Pull the image ```bash docker pull driveone/onedrive ``` **NOTE:** SELinux context needs to be configured or disabled for Docker, to be able to write to OneDrive host directory. ### 2. Prepare config volume Onedrive needs two volumes. One of them is the config volume. Create it with: ```bash docker volume create onedrive_conf ``` This will create a docker volume labeled `onedrive_conf`, where all configuration of your onedrive account will be stored. You can add a custom config file and other things later. The second docker volume is for your data folder and is created in the next step. It needs the path to a folder on your filesystem that you want to keep in sync with OneDrive. Keep in mind that: - The owner of your specified folder must not be root - The owner of your specified folder must have permissions for its parent directory ### 3. First run Onedrive needs to be authorized with your Microsoft account. This is achieved by running docker in interactive mode. Run the docker image with the two commands below and **make sure to change `onedriveDir` to the onedrive data directory on your filesystem (e.g. `"/home/abraunegg/OneDrive"`)** ```bash onedriveDir="${HOME}/OneDrive" docker run -it --restart unless-stopped --name onedrive -v onedrive_conf:/onedrive/conf -v "${onedriveDir}:/onedrive/data" driveone/onedrive ``` - You will be asked to open a specific link using your web browser - Login to your Microsoft Account and give the application the permission - After giving the permission, you will be redirected to a blank page. - Copy the URI of the blank page into the application. The onedrive monitor is configured to start with your host system. If your onedrive is working as expected, you can detach from the container with Ctrl+p, Ctrl+q. ### 4. Status, stop, and restart Check if the monitor service is running ```bash docker ps -f name=onedrive ``` Show monitor run logs ```bash docker logs onedrive ``` Stop running monitor ```bash docker stop onedrive ``` Resume monitor ```bash docker start onedrive ``` Remove onedrive monitor ```bash docker rm -f onedrive ``` ## Advanced Setup ### 5. Edit the config Onedrive should run in default configuration, however you can change your configuration by placing a custom config file in the `onedrive_conf` docker volume. First download the default config from [here](https://raw.githubusercontent.com/abraunegg/onedrive/master/config) Then put it into your onedrive_conf volume path, which can be found with: ```bash docker volume inspect onedrive_conf ``` Or you can map your own config folder to the config volume. Make sure to copy all files from the docker volume into your mapped folder first. The detailed document for the config can be found here: [additional-configuration](https://github.com/abraunegg/onedrive#additional-configuration) ### 6. Sync multiple accounts There are many ways to do this, the easiest is probably to 1. Create a second docker config volume (replace `Work` with your desired name): `docker volume create onedrive_conf_Work` 2. 
And start a second docker monitor container (again replace `Work` with your desired name): ``` onedriveDirWork="/home/abraunegg/OneDriveWork" docker run -it --restart unless-stopped --name onedrive_Work -v onedrive_conf_Work:/onedrive/conf -v "${onedriveDirWork}:/onedrive/data" driveone/onedrive ``` ## Run or update with one script If you are experienced with docker and onedrive, you can use the following script: ```bash # Update onedriveDir with correct existing OneDrive directory path onedriveDir="${HOME}/OneDrive" firstRun='-d' docker pull driveone/onedrive docker inspect onedrive_conf > /dev/null || { docker volume create onedrive_conf; firstRun='-it'; } docker inspect onedrive > /dev/null && docker rm -f onedrive docker run $firstRun --restart unless-stopped --name onedrive -v onedrive_conf:/onedrive/conf -v "${onedriveDir}:/onedrive/data" driveone/onedrive ``` ## Environment Variables | Variable | Purpose | Sample Value | | ---------------- | --------------------------------------------------- |:-------------:| | ONEDRIVE_UID | UserID (UID) to run as | 1000 | | ONEDRIVE_GID | GroupID (GID) to run as | 1000 | | ONEDRIVE_VERBOSE | Controls "--verbose" switch on onedrive sync. Default is 0 | 1 | | ONEDRIVE_DEBUG | Controls "--verbose --verbose" switch on onedrive sync. Default is 0 | 1 | | ONEDRIVE_RESYNC | Controls "--resync" switch on onedrive sync. Default is 0 | 1 | ### Usage Examples **Verbose Output:** ```bash docker container run -e ONEDRIVE_VERBOSE=1 -v onedrive_conf:/onedrive/conf -v "${onedriveDir}:/onedrive/data" driveone/onedrive ``` **Debug Output:** ```bash docker container run -e ONEDRIVE_DEBUG=1 -v onedrive_conf:/onedrive/conf -v "${onedriveDir}:/onedrive/data" driveone/onedrive ``` **Perform a --resync:** ```bash docker container run -e ONEDRIVE_RESYNC=1 -v onedrive_conf:/onedrive/conf -v "${onedriveDir}:/onedrive/data" driveone/onedrive ``` **Perform a --resync and --verbose:** ```bash docker container run -e ONEDRIVE_RESYNC=1 -e ONEDRIVE_VERBOSE=1 -v onedrive_conf:/onedrive/conf -v "${onedriveDir}:/onedrive/data" driveone/onedrive ``` ## Build instructions You can also build your own image instead of pulling the one from dockerhub: ```bash git clone https://github.com/abraunegg/onedrive cd onedrive docker build . -t local-onedrive -f contrib/docker/Dockerfile ``` There are alternate, smaller images available by building Dockerfile-stretch or Dockerfile-alpine. These [multi-stage builder pattern](https://docs.docker.com/develop/develop-images/multistage-build/) Dockerfiles require Docker version at least 17.05. ``` bash docker build . -t local-ondrive-stretch -f contrib/docker/Dockerfile-stretch ``` or ``` bash docker build . -t local-ondrive-alpine -f contrib/docker/Dockerfile-alpine ``` onedrive-2.3.13/docs/INSTALL.md000066400000000000000000000243401360252424000157740ustar00rootroot00000000000000# Building and Installing the OneDrive Free Client ## Linux Packages This project has been packaged for the following Linux distributions: * Fedora, simply install with `sudo dnf install onedrive` * Arch Linux, available from AUR as [onedrive-abraunegg](https://aur.archlinux.org/packages/onedrive-abraunegg/) **Important:** Distribution packages may be of an older release when compared to the latest release that is [available](https://github.com/abraunegg/onedrive/releases). If a package is out out date, please contact the package maintainer for resolution. 
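To quickly compare a distribution-packaged client against the latest available release, you can query the installed client directly (a simple check, assuming the package is already installed):
```text
onedrive --version
```
If the reported version is older than the latest [release](https://github.com/abraunegg/onedrive/releases), consider building from source as described below.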
## Build Requirements * Build environment must have at least 1GB of memory & 1GB swap space * [libcurl](http://curl.haxx.se/libcurl/) * [SQLite 3](https://www.sqlite.org/) >= 3.7.15 * [Digital Mars D Compiler (DMD)](http://dlang.org/download.html) **Note:** DMD version >= 2.083.1 or LDC version >= 1.12.0 is required to compile this application ### Dependencies: Ubuntu 16.x - i386 / i686 (less than 1GB Memory) **Important:** Build environment must have at least 512 of memory & 1GB swap space **Important:** Only use this method if you have <1GB of physical memory. **Note:** Peppermint 7 validated with the DMD compiler on the following i386 / i686 platform: ```text DISTRIB_ID=Peppermint DISTRIB_RELEASE=7 DISTRIB_CODENAME=xenial DISTRIB_DESCRIPTION="Peppermint 7 Seven" ``` First install development dependencies as per below: ```text sudo apt install build-essential sudo apt install libcurl4-openssl-dev sudo apt install libsqlite3-dev sudo apt install pkg-config sudo apt install git sudo apt install curl ``` For notifications the following is also necessary: ```text sudo apt install libnotify-dev ``` Second, install the DMD compiler as per below: ```text sudo wget http://master.dl.sourceforge.net/project/d-apt/files/d-apt.list -O /etc/apt/sources.list.d/d-apt.list sudo apt-get update && sudo apt-get -y --allow-unauthenticated install --reinstall d-apt-keyring sudo apt-get update && sudo apt-get install dmd-compiler dub ``` ### Dependencies: Ubuntu 16.x - i386 / i686 (1GB Memory) **Note:** Ubuntu 16.x validated with the DMD compiler on the following Ubuntu i386 / i686 platform: ```text DISTRIB_ID=Ubuntu DISTRIB_RELEASE=16.04 DISTRIB_CODENAME=xenial DISTRIB_DESCRIPTION="Ubuntu 16.04.6 LTS" ``` First install development dependencies as per below: ```text sudo apt install build-essential sudo apt install libcurl4-openssl-dev sudo apt install libsqlite3-dev sudo apt install pkg-config sudo apt install git sudo apt install curl ``` For notifications the following is also necessary: ```text sudo apt install libnotify-dev ``` Second, install the DMD compiler as per below: ```text curl -fsS https://dlang.org/install.sh | bash -s dmd ``` ### Dependencies: Ubuntu 18.x / Lubuntu 18.x / Debian 9 - i386 / i686 **Important:** The DMD compiler cannot be used in its default configuration on Ubuntu 18.x / Lubuntu 18.x / Debian 9 i386 / i686 architectures due to an issue in the Ubuntu / Debian linking process. See [https://issues.dlang.org/show_bug.cgi?id=19116](https://issues.dlang.org/show_bug.cgi?id=19116) for further details. 
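If you are unsure which linker your system currently uses by default, the following optional check may help (this assumes GNU binutils provides the `ld` command; the exact output wording varies by distribution):
```text
# Shows "GNU ld" (bfd) or "GNU gold" depending on the active default
ld --version | head -n 1
# If the 'ld' alternative has already been configured, display its current selection
update-alternatives --display ld
```
The `update-alternatives` commands shown later in this section switch the default to the gold linker, which avoids the issue described above.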
**Note:** Ubuntu 18.x validated with the DMD compiler on the following Ubuntu i386 / i686 platform: ```text DISTRIB_ID=Ubuntu DISTRIB_RELEASE=18.04 DISTRIB_CODENAME=bionic DISTRIB_DESCRIPTION="Ubuntu 18.04.3 LTS" ``` **Note:** Lubuntu 18.x validated with the DMD compiler on the following Lubuntu i386 / i686 platform: ```text DISTRIB_ID=Ubuntu DISTRIB_RELEASE=18.10 DISTRIB_CODENAME=cosmic DISTRIB_DESCRIPTION="Ubuntu 18.10" ``` **Note:** Debian 9 validated with the DMD compiler on the following Debian i386 / i686 platform: ```text cat /etc/debian_version 9.11 ``` First install development dependencies as per below: ```text sudo apt install build-essential sudo apt install libcurl4-openssl-dev sudo apt install libsqlite3-dev sudo apt install pkg-config sudo apt install git sudo apt install curl ``` For notifications the following is also necessary: ```text sudo apt install libnotify-dev ``` Second, install the DMD compiler as per below: ```text curl -fsS https://dlang.org/install.sh | bash -s dmd ``` Thirdly, reconfigure the default linker as per below: ```text sudo update-alternatives --install "/usr/bin/ld" "ld" "/usr/bin/ld.gold" 20 sudo update-alternatives --install "/usr/bin/ld" "ld" "/usr/bin/ld.bfd" 10 ``` ### Dependencies: Ubuntu 18.x, Ubuntu 19.x / Debian 9, Debian 10 - x86_64 ```text sudo apt install build-essential sudo apt install libcurl4-openssl-dev sudo apt install libsqlite3-dev sudo apt install pkg-config sudo apt install git sudo apt install curl curl -fsS https://dlang.org/install.sh | bash -s dmd ``` For notifications the following is also necessary: ```text sudo apt install libnotify-dev ``` ### Dependencies: CentOS 6.x / RHEL 6.x ```text sudo yum groupinstall 'Development Tools' sudo yum install libcurl-devel sudo yum install sqlite-devel curl -fsS https://dlang.org/install.sh | bash -s dmd ``` For notifications the following is also necessary: ```text sudo yum install libnotify-devel ``` In addition to the above requirements, the `sqlite` version used on CentOS 6.x / RHEL 6.x needs to be upgraded. 
Use the following instructions to update your version of `sqlite` so that it can support this client: ```text sudo yum -y update sudo yum -y install epel-release wget sudo yum -y install mock wget https://kojipkgs.fedoraproject.org//packages/sqlite/3.7.15.2/2.fc19/src/sqlite-3.7.15.2-2.fc19.src.rpm mock --rebuild sqlite-3.7.15.2-2.fc19.src.rpm sudo yum -y upgrade /var/lib/mock/epel-6-`arch`/result/sqlite-* ``` ### Dependencies: Fedora < Version 18 / CentOS 7.x / RHEL 7.x ```text sudo yum groupinstall 'Development Tools' sudo yum install libcurl-devel sudo yum install sqlite-devel curl -fsS https://dlang.org/install.sh | bash -s dmd ``` For notifications the following is also necessary: ```text sudo yum install libnotify-devel ``` ### Dependencies: Fedora > Version 18 / CentOS 8.x / RHEL 8.x ```text sudo dnf groupinstall 'Development Tools' sudo dnf install libcurl-devel sudo dnf install sqlite-devel curl -fsS https://dlang.org/install.sh | bash -s dmd ``` For notifications the following is also necessary: ```text sudo dnf install libnotify-devel ``` ### Dependencies: Arch Linux ```text sudo pacman -S curl sqlite dmd ``` For notifications the following is also necessary: ```text sudo pacman -S libnotify ``` ### Dependencies: Raspbian (ARMHF) ```text sudo apt-get install libcurl4-openssl-dev sudo apt-get install libsqlite3-dev sudo apt-get install libxml2 sudo apt-get install pkg-config wget https://github.com/ldc-developers/ldc/releases/download/v1.16.0/ldc2-1.16.0-linux-armhf.tar.xz tar -xvf ldc2-1.16.0-linux-armhf.tar.xz ``` For notifications the following is also necessary: ```text sudo apt install libnotify-dev ``` ### Dependencies: Debian (ARM64) ```text sudo apt-get install libcurl4-openssl-dev sudo apt-get install libsqlite3-dev sudo apt-get install libxml2 sudo apt-get install pkg-config wget https://github.com/ldc-developers/ldc/releases/download/v1.16.0/ldc2-1.16.0-linux-aarch64.tar.xz tar -xvf ldc2-1.16.0-linux-aarch64.tar.xz ``` For notifications the following is also necessary: ```text sudo apt install libnotify-dev ``` ### Dependencies: Gentoo ```text sudo emerge app-portage/layman sudo layman -a dlang ``` Add ebuild from contrib/gentoo to a local overlay to use. For notifications the following is also necessary: ```text sudo emerge x11-libs/libnotify ``` ### Dependencies: OpenSuSE Leap 15.0 ```text sudo zypper addrepo --check --refresh --name "D" http://download.opensuse.org/repositories/devel:/languages:/D/openSUSE_Leap_15.0/devel:languages:D.repo sudo zypper install git libcurl-devel sqlite3-devel D:dmd D:libphobos2-0_81 D:phobos-devel D:phobos-devel-static ``` For notifications the following is also necessary: ```text sudo zypper install libnotify-devel ``` ## Compilation & Installation ### Building using DMD Reference Compiler Before cloning and compiling, if you have installed DMD via curl for your OS, you will need to activate DMD as per example below: ```text Run `source ~/dlang/dmd-2.081.1/activate` in your shell to use dmd-2.081.1. This will setup PATH, LIBRARY_PATH, LD_LIBRARY_PATH, DMD, DC, and PS1. Run `deactivate` later on to restore your environment. ``` Without performing this step, the compilation process will fail. **Note:** Depending on your DMD version, substitute `2.081.1` above with your DMD version that is installed. ```text git clone https://github.com/abraunegg/onedrive.git cd onedrive ./configure make clean; make; sudo make install ``` ### Build options Notifications can be enabled using the `configure` switch `--enable-notifications`. 
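For example, a typical invocation with notifications enabled might look like this (illustrative only; the remaining defaults are unchanged):
```text
./configure --enable-notifications
make clean; make; sudo make install
```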
Systemd service files are installed in the appropriate directories on the system, as provided by `pkg-config systemd` settings. If the deduced paths need to be overridden, the two options `--with-systemdsystemunitdir` (for the Systemd system unit location) and `--with-systemduserunitdir` (for the Systemd user unit location) can be specified. Passing in `no` to one of these options disables service file installation. By passing `--enable-debug` to the `configure` call, `onedrive` gets built with additional debug information, useful (for example) to get `perf`-issued figures. By passing `--enable-completions` to the `configure` call, shell completion functions are installed for `bash` and `zsh`. The installation directories are determined as far as possible automatically, but can be overridden by passing `--with-bash-completion-dir=` and `--with-zsh-completion-dir=` to `configure`. ### Building using a different compiler (for example [LDC](https://wiki.dlang.org/LDC)) #### ARMHF Architecture ```text git clone https://github.com/abraunegg/onedrive.git cd onedrive ./configure DC=~/ldc2-1.16.0-linux-armhf/bin/ldmd2 make clean; make sudo make install ``` #### ARM64 Architecture ```text git clone https://github.com/abraunegg/onedrive.git cd onedrive ./configure DC=~/ldc2-1.16.0-linux-aarch64/bin/ldmd2 make clean; make sudo make install ``` ## Uninstall ```text sudo make uninstall # delete the application state rm -rf ~/.config/onedrive ``` If you are using the `--confdir` option, replace `~/.config/onedrive` above with that directory. If you just want to delete the application key, but keep the items database: ```text rm -f ~/.config/onedrive/refresh_token ``` onedrive-2.3.13/docs/Office365.md000066400000000000000000000015421360252424000163160ustar00rootroot00000000000000# Show how to access a SharePoint group drive in Office 365 business or education ## Obtaining the SharePoint Site Details 1. Login to OneDrive and under 'Shared Libraries' obtain the shared library name 2. Run the following command using the 'onedrive' client ```text onedrive --get-O365-drive-id '' ``` 3. This will return the following: ```text Initializing the Synchronization Engine ... Office 365 Library Name Query: SiteName: drive_id: b!6H_y8B...xU5 URL: ``` ## Configuring the onedrive client Once you have obtained the 'drive_id' above, add to your 'onedrive' configuration file (`~/.config/onedrive/config`) the following: ```text drive_id = "insert the drive id from above here" ``` The OneDrive client will now sync this SharePoint shared library to your local system. onedrive-2.3.13/docs/USAGE.md000066400000000000000000000721351360252424000155370ustar00rootroot00000000000000# Configuration and Usage of the OneDrive Free Client ## Using the client ### Upgrading from 'skilion' client The 'skilion' version contains a significant number of defects in how the local sync state is managed. When upgrading from the 'skilion' version to this version, it is advisable to stop any service / onedrive process from running and then remove any `items.sqlite3` file from your configuration directory (`~/.config/onedrive/`) as this will force the creation of a new local cache file.
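As an illustrative example only, if the client was installed as a systemd user service and the default configuration directory is in use, this clean-up could look like the following (adjust the service name and path to suit your setup):
```text
systemctl --user stop onedrive
rm -f ~/.config/onedrive/items.sqlite3
```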
Additionally, if you are using a 'config' file within your configuration directory (`~/.config/onedrive/`), please ensure that you update the `skip_file = ` option as per below: **Invalid configuration:** ```text skip_file = ".*|~*" ``` **Minimum valid configuration:** ```text skip_file = "~*" ``` **Default valid configuration:** ```text skip_file = "~*|.~*|*.tmp" ``` Do not use a skip_file entry of `.*` as this will prevent correct searching of local changes to process. ### Local File and Folder Naming Conventions The files and directories in the synchronization directory must follow the [Windows naming conventions](https://msdn.microsoft.com/en-us/library/aa365247). The application will attempt to handle instances where you have two files with the same names but with different capitalization. Where there is a namespace clash, the file name which clashes will not be synced. This is expected behavior and won't be fixed. ### curl compatibility If your system utilises curl >= 7.62.0 curl defaults to prefer HTTP/2 over HTTP/1.1 by default. If you wish to use HTTP/2 for some operations you will need to use the `--force-http-2` config option to enable otherwise all operations will use HTTP/1.1. ### First run :zap: After installing the application you must run it at least once from the terminal to authorize it. You will be asked to open a specific link using your web browser where you will have to login into your Microsoft Account and give the application the permission to access your files. After giving the permission, you will be redirected to a blank page. Copy the URI of the blank page into the application. ```text [user@hostname ~]$ onedrive Authorize this app visiting: https://..... Enter the response uri: ``` ### Show your configuration To validate your configuration the application will use, utilize the following: ```text onedrive --display-config ``` This will display all the pertinent runtime interpretation of the options and configuration you are using. This is helpful to validate the client will perform the operations your asking without performing a sync. Example output is as follows: ```text Config path = /home/alex/.config/onedrive Config file found in config path = false Config option 'sync_dir' = /home/alex/OneDrive Config option 'skip_dir' = Config option 'skip_file' = ~*|.~*|*.tmp Config option 'skip_dotfiles' = false Config option 'skip_symlinks' = false Config option 'monitor_interval' = 45 Config option 'min_notify_changes' = 5 Config option 'log_dir' = /var/log/onedrive/ Selective sync configured = false ``` ### Testing your configuration You are able to test your configuration by utilising the `--dry-run` CLI option. No files will be downloaded, uploaded or removed, however the application will display what 'would' have occurred. For example: ```text onedrive --synchronize --verbose --dry-run DRY-RUN Configured. Output below shows what 'would' have occurred. Loading config ... Using Config Dir: /home/user/.config/onedrive Initializing the OneDrive API ... Opening the item database ... All operations will be performed in: /home/user/OneDrive Initializing the Synchronization Engine ... Account Type: personal Default Drive ID: Default Root ID: Remaining Free Space: 5368709120 Fetching details for OneDrive Root OneDrive Root exists in the database Syncing changes from OneDrive ... Applying changes of Path ID: Uploading differences of . Processing root The directory has not changed Uploading new items of . 
OneDrive Client requested to create remote path: ./newdir The requested directory to create was not found on OneDrive - creating remote directory: ./newdir Successfully created the remote directory ./newdir on OneDrive Uploading new file ./newdir/newfile.txt ... done. Remaining free space: 5368709076 Applying changes of Path ID: ``` **Note:** `--dry-run` can only be used with `--synchronize`. It cannot be used with `--monitor` and will be ignored. ### Performing a sync By default all files are downloaded in `~/OneDrive`. After authorizing the application, a sync of your data can be performed by running: ```text onedrive --synchronize ``` This will synchronize files from your OneDrive account to your `~/OneDrive` local directory. If you prefer to use your local files as stored in `~/OneDrive` as the 'source of truth' use the following sync command: ```text onedrive --synchronize --local-first ``` ### Performing a selective directory sync In some cases it may be desirable to sync a single directory under ~/OneDrive without having to change your client configuration. To do this use the following command: ```text onedrive --synchronize --single-directory '' ``` Example: If the full path is `~/OneDrive/mydir`, the command would be `onedrive --synchronize --single-directory 'mydir'` ### Performing a 'one-way' download sync In some cases it may be desirable to 'download only' from OneDrive. To do this use the following command: ```text onedrive --synchronize --download-only ``` ### Performing a 'one-way' upload sync In some cases it may be desirable to 'upload only' to OneDrive. To do this use the following command: ```text onedrive --synchronize --upload-only ``` ### Increasing logging level When running a sync it may be desirable to see additional information as to the progress and operation of the client. To do this, use the following command: ```text onedrive --synchronize --verbose ``` ### Client Activity Log When running onedrive all actions can be logged to a separate log file. This can be enabled by using the `--enable-logging` flag. By default, log files will be written to `/var/log/onedrive/` **Note:** You will need to ensure the existence of this directory, and that your user has the applicable permissions to write to this directory or the following warning will be printed: ```text Unable to access /var/log/onedrive/ Please manually create '/var/log/onedrive/' and set appropriate permissions to allow write access The requested client activity log will instead be located in the users home directory ``` On many systems this can be achieved by ```text mkdir /var/log/onedrive chown root.users /var/log/onedrive chmod 0775 /var/log/onedrive ``` All log files will be in the format of `%username%.onedrive.log`, where `%username%` represents the user who ran the client. **Note:** To use a different log directory rather than the default above, add the following as a configuration option to `~/.config/onedrive/config`: ```text log_dir = "/path/to/location/" ``` Trailing slash required An example of the log file is below: ```text 2018-Apr-07 17:09:32.1162837 Loading config ... 2018-Apr-07 17:09:32.1167908 No config file found, using defaults 2018-Apr-07 17:09:32.1170626 Initializing the OneDrive API ... 2018-Apr-07 17:09:32.5359143 Opening the item database ... 2018-Apr-07 17:09:32.5515295 All operations will be performed in: /root/OneDrive 2018-Apr-07 17:09:32.5518387 Initializing the Synchronization Engine ... 
2018-Apr-07 17:09:36.6701351 Applying changes of Path ID: 2018-Apr-07 17:09:37.4434282 Adding OneDrive Root to the local database 2018-Apr-07 17:09:37.4478342 The item is already present 2018-Apr-07 17:09:37.4513752 The item is already present 2018-Apr-07 17:09:37.4550062 The item is already present 2018-Apr-07 17:09:37.4586444 The item is already present 2018-Apr-07 17:09:37.7663571 Adding OneDrive Root to the local database 2018-Apr-07 17:09:37.7739451 Fetching details for OneDrive Root 2018-Apr-07 17:09:38.0211861 OneDrive Root exists in the database 2018-Apr-07 17:09:38.0215375 Uploading differences of . 2018-Apr-07 17:09:38.0220464 Processing 2018-Apr-07 17:09:38.0224884 The directory has not changed 2018-Apr-07 17:09:38.0229369 Processing 2018-Apr-07 17:09:38.02338 The directory has not changed 2018-Apr-07 17:09:38.0237678 Processing 2018-Apr-07 17:09:38.0242285 The directory has not changed 2018-Apr-07 17:09:38.0245977 Processing 2018-Apr-07 17:09:38.0250788 The directory has not changed 2018-Apr-07 17:09:38.0254657 Processing 2018-Apr-07 17:09:38.0259923 The directory has not changed 2018-Apr-07 17:09:38.0263547 Uploading new items of . 2018-Apr-07 17:09:38.5708652 Applying changes of Path ID: ``` ### Notifications If notification support is compiled in, the following events will trigger a notification within the display manager session: * Aborting a sync if .nosync file is found * Cannot create remote directory * Cannot upload file changes * Cannot delete remote file / folder * Cannot move remote file / folder ### Handling a OneDrive account password change If you change your OneDrive account password, the client will no longer be authorised to sync, and will generate the following error: ```text ERROR: OneDrive returned a 'HTTP 401 Unauthorized' - Cannot Initialize Sync Engine ``` To re-authorise the client, follow the steps below: 1. If running the client as a service (init.d or systemd), stop the service 2. Run the command `onedrive --logout`. This will clean up the previous authorisation, and will prompt you to re-authorise as per initial configuration. 3. Restart the client if running as a service or perform a manual sync The application will now sync with OneDrive with the new credentials. ## Configuration Configuration is determined by three layers: the default values, values set in the configuration file, and values passed in via the command line. The default values provide a reasonable default, and configuration is optional. Most command line options have a respective configuration file setting. If you want to change the defaults, you can copy and edit the included config file into your `~/.config/onedrive` directory: ```text mkdir -p ~/.config/onedrive cp ./config ~/.config/onedrive/config nano ~/.config/onedrive/config ``` This file does not get created by default, and should only be created if you want to change the 'default' operational parameters. See the [config](config) file for the full list of options, and [All available commands](https://github.com/abraunegg/onedrive/blob/master/docs/USAGE.md#all-available-commands) for all possible keys and there default values. The default configuration file is listed below: ```text # Configuration for OneDrive Linux Client # This file contains the list of supported configuration fields # with their default values. # All values need to be enclosed in quotes # When changing a config option below, remove the '#' from the start of the line # For explanations of all config options below see docs/USAGE.md or the man page. 
# # sync_dir = "~/OneDrive" # skip_file = "~*|.~*|*.tmp" # monitor_interval = "45" # skip_dir = "" # log_dir = "/var/log/onedrive/" # drive_id = "" # upload_only = "false" # check_nomount = "false" # check_nosync = "false" # download_only = "false" # disable_notifications = "false" # disable_upload_validation = "false" # enable_logging = "false" # force_http_11 = "false" # force_http_2 = "false" # local_first = "false" # no_remote_delete = "false" # skip_symlinks = "false" # debug_https = "false" # skip_dotfiles = "false" # dry_run = "false" # min_notify_changes = "5" # monitor_log_frequency = "5" # monitor_fullscan_frequency = "10" # sync_root_files = "false" # user_agent = "" ``` ### 'config' file configuration examples: The below are 'config' file examples to assist with configuration of the 'config' file: #### sync_dir Example: ```text # When changing a config option below, remove the '#' from the start of the line # For explanations of all config options below see docs/USAGE.md or the man page. # sync_dir="~/MyDirToSync" # skip_file = "~*|.~*|*.tmp" # monitor_interval = "45" # skip_dir = "" # log_dir = "/var/log/onedrive/" ``` **Please Note:** Proceed with caution here when changing the default sync dir from `~/OneDrive` to `~/MyDirToSync` The issue here is around how the client stores the sync_dir path in the database. If the config file is missing, or you don't use the `--syncdir` parameter - what will happen is the client will default back to `~/OneDrive` and 'think' that either all your data has been deleted - thus delete the content on OneDrive, or will start downloading all data from OneDrive into the default location. **Note:** After changing `sync_dir`, you must perform a full re-synchronization by adding `--resync` to your existing command line - for example: `onedrive --synchronize --resync` #### skip_dir Example: ```text # When changing a config option below, remove the '#' from the start of the line # For explanations of all config options below see docs/USAGE.md or the man page. # # sync_dir = "~/OneDrive" # skip_file = "~*|.~*|*.tmp" # monitor_interval = "45" skip_dir = "Desktop|Documents/IISExpress|Documents/SQL Server Management Studio|Documents/Visual Studio*|Documents/WindowsPowerShell" # log_dir = "/var/log/onedrive/" ``` Patterns are case insensitive. `*` and `?` [wildcards characters](https://technet.microsoft.com/en-us/library/bb490639.aspx) are supported. Use `|` to separate multiple patterns. **Important:** Entries under `skip_dir` are relative to your `sync_dir` path. **Note:** After changing `skip_dir`, you must perform a full re-synchronization by adding `--resync` to your existing command line - for example: `onedrive --synchronize --resync` #### skip_file Example: ```text # When changing a config option below, remove the '#' from the start of the line # For explanations of all config options below see docs/USAGE.md or the man page. # # sync_dir = "~/OneDrive" skip_file = "~*|Documents/OneNote*|Documents/config.xlaunch|myfile.ext" # monitor_interval = "45" # skip_dir = "" # log_dir = "/var/log/onedrive/" ``` Patterns are case insensitive. `*` and `?` [wildcards characters](https://technet.microsoft.com/en-us/library/bb490639.aspx) are supported. Use `|` to separate multiple patterns. 
Files can be skipped in the following fashion: * Specify a wildcard, eg: '*.txt' (skip all txt files) * Explicitly specify the filename and its full path relative to your sync_dir, eg: 'path/to/file/filename.ext' * Explicitly specify the filename only and skip every instance of this filename, eg: 'filename.ext' By default, the following files will be skipped: * Files that start with ~ * Files that start with .~ (like .~lock.* files generated by LibreOffice) * Files that end in .tmp **Important:** Do not use a skip_file entry of `.*` as this will prevent correct searching of local changes to process. **Note:** after changing `skip_file`, you must perform a full re-synchronization by adding `--resync` to your existing command line - for example: `onedrive --synchronize --resync` #### skip_dotfiles Example: ```text # skip_symlinks = "false" # debug_https = "false" skip_dotfiles = "true" # dry_run = "false" # monitor_interval = "45" ``` Setting this to `"true"` will skip all .files and .folders while syncing. #### skip_symlinks Example: ```text # local_first = "false" # no_remote_delete = "false" skip_symlinks = "true" # debug_https = "false" # skip_dotfiles = "false" ``` Setting this to `"true"` will skip all symlinks while syncing. #### monitor_interval Example: ```text # skip_dotfiles = "false" # dry_run = "false" monitor_interval = "300" # min_notify_changes = "5" # monitor_log_frequency = "5" ``` The monitor interval is defined as the wait time 'between' syncs when running in monitor mode. By default without configuration, the monitor_interval is set to 45 seconds. Setting this value to 300 will run the sync process every 5 minutes. #### min_notify_changes Example: ```text # dry_run = "false" # monitor_interval = "45" min_notify_changes = "50" # monitor_log_frequency = "5" # monitor_fullscan_frequency = "10" ``` This option defines the minimum number of pending incoming changes necessary to trigger a desktop notification. This allows controlling the frequency of notifications. #### Selective sync via 'sync_list' file Selective sync allows you to sync only specific files and directories. To enable selective sync, create a file named `sync_list` in `~/.config/onedrive`. Each line of the file represents a relative path from your `sync_dir`. All files and directories not matching any line of the file will be skipped during all operations. Here is an example of `sync_list`: ```text Backup Documents/latest_report.docx Work/ProjectX notes.txt Blender Cinema Soc Codes Textbooks Year 2 ``` **Note:** after changing the sync_list, you must perform a full re-synchronization by adding `--resync` to your existing command line - for example: `onedrive --synchronize --resync` ### How to 'skip' directories from syncing? There are several mechanisms available to 'skip' a directory from the sync process: * Utilise 'skip_dir' * Utilise 'sync_list' One further method is to add a '.nosync' empty file to any folder. When this file is present, adding `--check-for-nosync` to your command line will now make the sync process skip any folder where the '.nosync' file is present. To make this a permanent change so that folders are always skipped when a '.nosync' empty file is present, add the following to your config file: Example: ```text # upload_only = "false" # check_nomount = "false" check_nosync = "true" # download_only = "false" # disable_notifications = "false" ``` ### Shared folders (OneDrive Personal) Folders shared with you can be synced by adding them to your OneDrive.
To do that, open your OneDrive, go to the Shared files list, right-click on the folder you want to sync, and then click on "Add to my OneDrive". ### Shared folders (OneDrive Business or Office 365) Currently not supported. ### SharePoint / Office 365 Shared Libraries Refer to [./Office365.md](Office365.md) for configuration assistance. ## Running 'onedrive' in 'monitor' mode Monitor mode (`--monitor`) allows the onedrive process to continually monitor your local file system for changes to files. Two common errors can occur when using monitor mode: * Initialisation failure * Unable to add a new inotify watch Both of these errors are local environment issues, where the following system variables need to be increased as the current system values are potentially too low: * `fs.file-max` * `fs.inotify.max_user_watches` To determine what these values are on your system use the following commands: ``` sysctl fs.file-max sysctl fs.inotify.max_user_watches ``` To make a change to these variables: ``` sudo sysctl fs.file-max= sudo sysctl fs.inotify.max_user_watches= ``` To make these changes permanent, refer to your OS reference documentation. ## Running 'onedrive' as a system service There are two ways that onedrive can be used as a service: * via init.d * via systemd **Note:** If using the service files, you may need to increase the `fs.inotify.max_user_watches` value on your system to handle the number of files in the directory you are monitoring as the initial value may be too low. ### OneDrive service running as root user via init.d ```text chkconfig onedrive on service onedrive start ``` To see the logs run: ```text tail -f /var/log/onedrive/.onedrive.log ``` To change what 'user' the client runs under (by default root), manually edit the init.d service file and modify `daemon --user root onedrive_service.sh` for the correct user. ### OneDrive service running as root user via systemd (Arch, Ubuntu, Debian, OpenSuSE, Fedora) ```text systemctl --user enable onedrive systemctl --user start onedrive ``` To see the logs run: ```text journalctl --user-unit onedrive -f ``` ### OneDrive service running as root user via systemd (Red Hat Enterprise Linux, CentOS Linux) ```text systemctl enable onedrive systemctl start onedrive ``` To see the logs run: ```text journalctl onedrive -f ``` ### OneDrive service running as a non-root user via systemd (without notifications or GUI) In some cases it is desirable to run the OneDrive client as a service, but not as the 'root' user. In this case, follow the directions below to configure the service for a non-root user. 1. As the user who will be running the service, run the application in standalone mode, authorize the application for use & validate that the synchronization is working as expected: ```text onedrive --synchronize --verbose ``` 2. Once the application is validated and working for your user, as the 'root' user, where is your username from step 1 above. ```text systemctl enable onedrive@.service systemctl start onedrive@.service ``` 3. To view the status of the service running for the user, use the following: ```text systemctl status onedrive@.service ``` ### OneDrive service running as a non-root user via systemd (with notifications enabled) (Arch, Ubuntu, Debian, OpenSuSE, Fedora) In some cases you may wish to receive GUI notifications when using the client when logged in as a non-root user. In this case, follow the directions below: 1. Login via the graphical UI as the user you wish to enable the service for 2.
Disable any `onedrive@` service files for your username - eg: ```text sudo systemctl stop onedrive@alex.service sudo systemctl disable onedrive@alex.service ``` 3. Enable the service as per the following: ```text systemctl --user enable onedrive systemctl --user start onedrive ``` To see the logs run: ```text journalctl --user-unit onedrive -f ``` **Note:** `systemctl --user` is not applicable for Red Hat Enterprise Linux (RHEL) or CentOS Linux platforms. ## Additional Configuration ### Using multiple OneDrive accounts You can run multiple instances of the application by specifying a different config directory in order to handle multiple OneDrive accounts. For example, if you have a work and a personal account, you can run the onedrive command using the `--confdir` parameter. Here is an example: ```text onedrive --synchronize --verbose --confdir="~/.config/onedrivePersonal" & onedrive --synchronize --verbose --confdir="~/.config/onedriveWork" & ``` or ```text onedrive --monitor --verbose --confdir="~/.config/onedrivePersonal" & onedrive --monitor --verbose --confdir="~/.config/onedriveWork" & ``` * `--synchronize` does a one-time sync * `--monitor` keeps the application running and monitoring for changes both local and remote * `&` puts the application in the background and leaves the terminal interactive ### Automatic syncing of both OneDrive accounts In order to automatically start syncing your OneDrive accounts, you will need to create a service file for each account. From the `~/onedrive` folder: ```text cp onedrive.service onedrive-work.service ``` And edit the line beginning with `ExecStart` so that the command mirrors the one you used above: ```text ExecStart=/usr/local/bin/onedrive --monitor --confdir="/path/to/config/dir" ``` Then you can safely run these commands: ```text systemctl --user enable onedrive-work systemctl --user start onedrive-work ``` Repeat these steps for each OneDrive account that you wish to use. ### Access OneDrive service through a proxy If you have a requirement to run the client through a proxy, there are a couple of ways to achieve this: 1. Set proxy configuration in `~/.bashrc` to allow the authorization process and when utilizing `--synchronize` 2. If running as a systemd service, edit the applicable systemd service file to include the proxy configuration information: ```text [Unit] Description=OneDrive Free Client Documentation=https://github.com/abraunegg/onedrive After=network-online.target Wants=network-online.target [Service] Environment="HTTP_PROXY=http://ip.address:port" Environment="HTTPS_PROXY=http://ip.address:port" ExecStart=/usr/local/bin/onedrive --monitor Restart=on-failure RestartSec=3 [Install] WantedBy=default.target ``` **Note:** After modifying the service files, you will need to run `sudo systemctl daemon-reload` to ensure the service file changes are picked up. A restart of the OneDrive service will also be required to pick up the change to send the traffic via the proxy server. ### Setup selinux for a sync folder outside of the home folder If selinux is enforced and the sync folder is outside of the home folder, then as long as there is no policy for cloud file service providers, label the file system folder as user_home_t.
```text sudo semanage fcontext -a -t user_home_t /path/to/onedriveSyncFolder sudo restorecon -R -v /path/to/onedriveSyncFolder ``` To remove this change from selinux and restore the default behaivor: ```text sudo semanage fcontext -d /path/to/onedriveSyncFolder sudo restorecon -R -v /path/to/onedriveSyncFolder ``` ## All available commands Output of `onedrive --help` ```text OneDrive - a client for OneDrive Cloud Services Usage: onedrive [options] --synchronize Do a one time synchronization onedrive [options] --monitor Monitor filesystem and sync regularly onedrive [options] --display-config Display the currently used configuration onedrive [options] --display-sync-status Query OneDrive service and report on pending changes onedrive -h | --help Show this help screen onedrive --version Show version Options: --auth-files ARG Perform authorization via two files passed in as ARG in the format `authUrl:responseUrl` The authorization URL is written to the `authUrl`, then onedrive waits for the file `responseUrl` to be present, and reads the response from that file. --check-for-nomount Check for the presence of .nosync in the syncdir root. If found, do not perform sync. --check-for-nosync Check for the presence of .nosync in each directory. If found, skip directory from sync. --confdir ARG Set the directory used to store the configuration files --create-directory ARG Create a directory on OneDrive - no sync will be performed. --debug-https Debug OneDrive HTTPS communication. --destination-directory ARG Destination directory for renamed or move on OneDrive - no sync will be performed. --disable-notifications Do not use desktop notifications in monitor mode. --disable-upload-validation Disable upload validation when uploading to OneDrive --display-config Display what options the client will use as currently configured - no sync will be performed. --display-sync-status Display the sync status of the client - no sync will be performed. --download-only Only download remote changes --dry-run Perform a trial sync with no changes made --enable-logging Enable client activity to a separate log file --force-http-1.1 Force the use of HTTP/1.1 for all operations (DEPRECIATED) --force-http-2 Force the use of HTTP/2 for all operations where applicable --get-O365-drive-id ARG Query and return the Office 365 Drive ID for a given Office 365 SharePoint Shared Library --get-file-link ARG Display the file link of a synced file --help -h This help information. --local-first Synchronize from the local directory source first, before downloading changes from OneDrive. --log-dir ARG Directory where logging output is saved to, needs to end with a slash. --logout Logout the current user --min-notify-changes ARG Minimum number of pending incoming changes necessary to trigger a desktop notification --monitor -m Keep monitoring for local and remote changes --monitor-fullscan-frequency ARG Number of sync runs before performing a full local scan of the synced directory --monitor-interval ARG Number of seconds by which each sync operation is undertaken when idle under monitor mode. --monitor-log-frequency ARG Frequency of logging in monitor mode --no-remote-delete Do not delete local file 'deletes' from OneDrive when using --upload-only --print-token Print the access token, useful for debugging --remove-directory ARG Remove a directory on OneDrive - no sync will be performed. --resync Forget the last saved state, perform a full sync --single-directory ARG Specify a single local directory within the OneDrive root to sync. 
--skip-dir Skip any directories that match this pattern from syncing --skip-dot-files Skip dot files and folders from syncing --skip-file ARG Skip any files that match this pattern from syncing --skip-size Skip new files larger than this size (in MB) --skip-symlinks Skip syncing of symlinks --source-directory ARG Source directory to rename or move on OneDrive - no sync will be performed. --sync-root-files Sync all files in sync_dir root when using sync_list. --syncdir ARG Specify the local directory used for synchronization to OneDrive --synchronize Perform a synchronization --upload-only Only upload to OneDrive, do not sync changes from OneDrive locally --user-agent ARG Specify a User Agent string to the http client --verbose -v+ Print more details, useful for debugging (repeat for extra debugging) --version Print the version and exit ``` onedrive-2.3.13/install-sh000077500000000000000000000360101360252424000154150ustar00rootroot00000000000000#!/bin/sh # install - install a program, script, or datafile scriptversion=2018-03-11.20; # UTC # This originates from X11R5 (mit/util/scripts/install.sh), which was # later released in X11R6 (xc/config/util/install.sh) with the # following copyright and license. # # Copyright (C) 1994 X Consortium # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN # AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC- # TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # # Except as contained in this notice, the name of the X Consortium shall not # be used in advertising or otherwise to promote the sale, use or other deal- # ings in this Software without prior written authorization from the X Consor- # tium. # # # FSF changes to this file are in the public domain. # # Calling this script install-sh is preferred over install.sh, to prevent # 'make' implicit rules from creating a file called install from it # when there is no Makefile. # # This script is compatible with the BSD install script, but was written # from scratch. tab=' ' nl=' ' IFS=" $tab$nl" # Set DOITPROG to "echo" to test this script. doit=${DOITPROG-} doit_exec=${doit:-exec} # Put in absolute file names if you don't have them in your path; # or use environment vars. chgrpprog=${CHGRPPROG-chgrp} chmodprog=${CHMODPROG-chmod} chownprog=${CHOWNPROG-chown} cmpprog=${CMPPROG-cmp} cpprog=${CPPROG-cp} mkdirprog=${MKDIRPROG-mkdir} mvprog=${MVPROG-mv} rmprog=${RMPROG-rm} stripprog=${STRIPPROG-strip} posix_mkdir= # Desired mode of installed file. mode=0755 chgrpcmd= chmodcmd=$chmodprog chowncmd= mvcmd=$mvprog rmcmd="$rmprog -f" stripcmd= src= dst= dir_arg= dst_arg= copy_on_change=false is_target_a_directory=possibly usage="\ Usage: $0 [OPTION]... 
[-T] SRCFILE DSTFILE or: $0 [OPTION]... SRCFILES... DIRECTORY or: $0 [OPTION]... -t DIRECTORY SRCFILES... or: $0 [OPTION]... -d DIRECTORIES... In the 1st form, copy SRCFILE to DSTFILE. In the 2nd and 3rd, copy all SRCFILES to DIRECTORY. In the 4th, create DIRECTORIES. Options: --help display this help and exit. --version display version info and exit. -c (ignored) -C install only if different (preserve the last data modification time) -d create directories instead of installing files. -g GROUP $chgrpprog installed files to GROUP. -m MODE $chmodprog installed files to MODE. -o USER $chownprog installed files to USER. -s $stripprog installed files. -t DIRECTORY install into DIRECTORY. -T report an error if DSTFILE is a directory. Environment variables override the default commands: CHGRPPROG CHMODPROG CHOWNPROG CMPPROG CPPROG MKDIRPROG MVPROG RMPROG STRIPPROG " while test $# -ne 0; do case $1 in -c) ;; -C) copy_on_change=true;; -d) dir_arg=true;; -g) chgrpcmd="$chgrpprog $2" shift;; --help) echo "$usage"; exit $?;; -m) mode=$2 case $mode in *' '* | *"$tab"* | *"$nl"* | *'*'* | *'?'* | *'['*) echo "$0: invalid mode: $mode" >&2 exit 1;; esac shift;; -o) chowncmd="$chownprog $2" shift;; -s) stripcmd=$stripprog;; -t) is_target_a_directory=always dst_arg=$2 # Protect names problematic for 'test' and other utilities. case $dst_arg in -* | [=\(\)!]) dst_arg=./$dst_arg;; esac shift;; -T) is_target_a_directory=never;; --version) echo "$0 $scriptversion"; exit $?;; --) shift break;; -*) echo "$0: invalid option: $1" >&2 exit 1;; *) break;; esac shift done # We allow the use of options -d and -T together, by making -d # take the precedence; this is for compatibility with GNU install. if test -n "$dir_arg"; then if test -n "$dst_arg"; then echo "$0: target directory not allowed when installing a directory." >&2 exit 1 fi fi if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then # When -d is used, all remaining arguments are directories to create. # When -t is used, the destination is already specified. # Otherwise, the last argument is the destination. Remove it from $@. for arg do if test -n "$dst_arg"; then # $@ is not empty: it contains at least $arg. set fnord "$@" "$dst_arg" shift # fnord fi shift # arg dst_arg=$arg # Protect names problematic for 'test' and other utilities. case $dst_arg in -* | [=\(\)!]) dst_arg=./$dst_arg;; esac done fi if test $# -eq 0; then if test -z "$dir_arg"; then echo "$0: no input file specified." >&2 exit 1 fi # It's OK to call 'install-sh -d' without argument. # This can happen when creating conditional directories. exit 0 fi if test -z "$dir_arg"; then if test $# -gt 1 || test "$is_target_a_directory" = always; then if test ! -d "$dst_arg"; then echo "$0: $dst_arg: Is not a directory." >&2 exit 1 fi fi fi if test -z "$dir_arg"; then do_exit='(exit $ret); exit $ret' trap "ret=129; $do_exit" 1 trap "ret=130; $do_exit" 2 trap "ret=141; $do_exit" 13 trap "ret=143; $do_exit" 15 # Set umask so as not to create temps with too-generous modes. # However, 'strip' requires both read and write access to temps. case $mode in # Optimize common cases. *644) cp_umask=133;; *755) cp_umask=22;; *[0-7]) if test -z "$stripcmd"; then u_plus_rw= else u_plus_rw='% 200' fi cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;; *) if test -z "$stripcmd"; then u_plus_rw= else u_plus_rw=,u+rw fi cp_umask=$mode$u_plus_rw;; esac fi for src do # Protect names problematic for 'test' and other utilities. 
case $src in -* | [=\(\)!]) src=./$src;; esac if test -n "$dir_arg"; then dst=$src dstdir=$dst test -d "$dstdir" dstdir_status=$? else # Waiting for this to be detected by the "$cpprog $src $dsttmp" command # might cause directories to be created, which would be especially bad # if $src (and thus $dsttmp) contains '*'. if test ! -f "$src" && test ! -d "$src"; then echo "$0: $src does not exist." >&2 exit 1 fi if test -z "$dst_arg"; then echo "$0: no destination specified." >&2 exit 1 fi dst=$dst_arg # If destination is a directory, append the input filename. if test -d "$dst"; then if test "$is_target_a_directory" = never; then echo "$0: $dst_arg: Is a directory" >&2 exit 1 fi dstdir=$dst dstbase=`basename "$src"` case $dst in */) dst=$dst$dstbase;; *) dst=$dst/$dstbase;; esac dstdir_status=0 else dstdir=`dirname "$dst"` test -d "$dstdir" dstdir_status=$? fi fi case $dstdir in */) dstdirslash=$dstdir;; *) dstdirslash=$dstdir/;; esac obsolete_mkdir_used=false if test $dstdir_status != 0; then case $posix_mkdir in '') # Create intermediate dirs using mode 755 as modified by the umask. # This is like FreeBSD 'install' as of 1997-10-28. umask=`umask` case $stripcmd.$umask in # Optimize common cases. *[2367][2367]) mkdir_umask=$umask;; .*0[02][02] | .[02][02] | .[02]) mkdir_umask=22;; *[0-7]) mkdir_umask=`expr $umask + 22 \ - $umask % 100 % 40 + $umask % 20 \ - $umask % 10 % 4 + $umask % 2 `;; *) mkdir_umask=$umask,go-w;; esac # With -d, create the new directory with the user-specified mode. # Otherwise, rely on $mkdir_umask. if test -n "$dir_arg"; then mkdir_mode=-m$mode else mkdir_mode= fi posix_mkdir=false case $umask in *[123567][0-7][0-7]) # POSIX mkdir -p sets u+wx bits regardless of umask, which # is incompatible with FreeBSD 'install' when (umask & 300) != 0. ;; *) # Note that $RANDOM variable is not portable (e.g. dash); Use it # here however when possible just to lower collision chance. tmpdir=${TMPDIR-/tmp}/ins$RANDOM-$$ trap 'ret=$?; rmdir "$tmpdir/a/b" "$tmpdir/a" "$tmpdir" 2>/dev/null; exit $ret' 0 # Because "mkdir -p" follows existing symlinks and we likely work # directly in world-writeable /tmp, make sure that the '$tmpdir' # directory is successfully created first before we actually test # 'mkdir -p' feature. if (umask $mkdir_umask && $mkdirprog $mkdir_mode "$tmpdir" && exec $mkdirprog $mkdir_mode -p -- "$tmpdir/a/b") >/dev/null 2>&1 then if test -z "$dir_arg" || { # Check for POSIX incompatibilities with -m. # HP-UX 11.23 and IRIX 6.5 mkdir -m -p sets group- or # other-writable bit of parent directory when it shouldn't. # FreeBSD 6.1 mkdir -m -p sets mode of existing directory. test_tmpdir="$tmpdir/a" ls_ld_tmpdir=`ls -ld "$test_tmpdir"` case $ls_ld_tmpdir in d????-?r-*) different_mode=700;; d????-?--*) different_mode=755;; *) false;; esac && $mkdirprog -m$different_mode -p -- "$test_tmpdir" && { ls_ld_tmpdir_1=`ls -ld "$test_tmpdir"` test "$ls_ld_tmpdir" = "$ls_ld_tmpdir_1" } } then posix_mkdir=: fi rmdir "$tmpdir/a/b" "$tmpdir/a" "$tmpdir" else # Remove any dirs left behind by ancient mkdir implementations. rmdir ./$mkdir_mode ./-p ./-- "$tmpdir" 2>/dev/null fi trap '' 0;; esac;; esac if $posix_mkdir && ( umask $mkdir_umask && $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir" ) then : else # The umask is ridiculous, or mkdir does not conform to POSIX, # or it failed possibly due to a race condition. Create the # directory the slow way, step by step, checking for races as we go. 
case $dstdir in /*) prefix='/';; [-=\(\)!]*) prefix='./';; *) prefix='';; esac oIFS=$IFS IFS=/ set -f set fnord $dstdir shift set +f IFS=$oIFS prefixes= for d do test X"$d" = X && continue prefix=$prefix$d if test -d "$prefix"; then prefixes= else if $posix_mkdir; then (umask=$mkdir_umask && $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break # Don't fail if two instances are running concurrently. test -d "$prefix" || exit 1 else case $prefix in *\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;; *) qprefix=$prefix;; esac prefixes="$prefixes '$qprefix'" fi fi prefix=$prefix/ done if test -n "$prefixes"; then # Don't fail if two instances are running concurrently. (umask $mkdir_umask && eval "\$doit_exec \$mkdirprog $prefixes") || test -d "$dstdir" || exit 1 obsolete_mkdir_used=true fi fi fi if test -n "$dir_arg"; then { test -z "$chowncmd" || $doit $chowncmd "$dst"; } && { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } && { test "$obsolete_mkdir_used$chowncmd$chgrpcmd" = false || test -z "$chmodcmd" || $doit $chmodcmd $mode "$dst"; } || exit 1 else # Make a couple of temp file names in the proper directory. dsttmp=${dstdirslash}_inst.$$_ rmtmp=${dstdirslash}_rm.$$_ # Trap to clean up those temp files at exit. trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0 # Copy the file name to the temp name. (umask $cp_umask && $doit_exec $cpprog "$src" "$dsttmp") && # and set any options; do chmod last to preserve setuid bits. # # If any of these fail, we abort the whole thing. If we want to # ignore errors from any of these, just make sure not to ignore # errors from the above "$doit $cpprog $src $dsttmp" command. # { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } && { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } && { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } && { test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } && # If -C, don't bother to copy if it wouldn't change the file. if $copy_on_change && old=`LC_ALL=C ls -dlL "$dst" 2>/dev/null` && new=`LC_ALL=C ls -dlL "$dsttmp" 2>/dev/null` && set -f && set X $old && old=:$2:$4:$5:$6 && set X $new && new=:$2:$4:$5:$6 && set +f && test "$old" = "$new" && $cmpprog "$dst" "$dsttmp" >/dev/null 2>&1 then rm -f "$dsttmp" else # Rename the file to the real destination. $doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null || # The rename failed, perhaps because mv can't rename something else # to itself, or perhaps because mv is so ancient that it does not # support -f. { # Now remove or move aside any old file at destination location. # We try this two ways since rm can't unlink itself on some # systems and the destination file might be busy for other # reasons. In this case, the final cleanup might fail but the new # file should still install successfully. { test ! -f "$dst" || $doit $rmcmd -f "$dst" 2>/dev/null || { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null && { $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; } } || { echo "$0: cannot unlink or rename $dst" >&2 (exit 1); exit 1 } } && # Now rename the file to the real destination. 
$doit $mvcmd "$dsttmp" "$dst" } fi || exit 1 trap '' 0 fi done # Local variables: # eval: (add-hook 'before-save-hook 'time-stamp) # time-stamp-start: "scriptversion=" # time-stamp-format: "%:y-%02m-%02d.%02H" # time-stamp-time-zone: "UTC0" # time-stamp-end: "; # UTC" # End: onedrive-2.3.13/onedrive.1.in000066400000000000000000000213631360252424000157200ustar00rootroot00000000000000.TH ONEDRIVE "1" "@PACKAGE_DATE@" "@PACKAGE_VERSION@" "User Commands" .SH NAME onedrive \- folder synchronization with OneDrive .SH SYNOPSIS .B onedrive [\fI\,OPTION\/\fR] \-\-synchronize .br .B onedrive [\fI\,OPTION\/\fR] \-\-monitor .br .B onedrive [\fI\,OPTION\/\fR] \-\-display-config .br .B onedrive [\fI\,OPTION\/\fR] \-\-display-sync-status .SH DESCRIPTION A complete tool to interact with OneDrive on Linux. .SH OPTIONS Without any option given, no sync is done and the program exits. .TP \fB\-\-auth\-files\fP ARG Perform authorization via two files passed in as \fBARG\fP in the format \fBauthUrl:responseUrl\fP. The authorization URL is written to the \fBauthUrl\fP, then \fBonedrive\fP waits for the file \fBresponseUrl\fP to be present, and reads the response from that file. .TP \fB\-\-check\-for\-nomount\fP Check for the presence of .nosync in the syncdir root. If found, do not perform sync. .br Configuration file key: \fBcheck_nomount\fP (default: \fBfalse\fP) .TP \fB\-\-check\-for\-nosync\fP Check for the presence of .nosync in each directory. If found, skip directory from sync. .br Configuration file key: \fBcheck_nosync\fP (default: \fBfalse\fP) .TP \fB\-\-confdir\fP ARG Set the directory used to store the configuration files .TP \fB\-\-create\-directory\fP ARG Create a directory on OneDrive \- no sync will be performed. .TP \fB\-\-destination\-directory\fP ARG Destination directory for renamed or move on OneDrive \- no sync will be performed. .TP \fB\-\-debug\-https\fP Debug OneDrive HTTPS communication. .br Configuration file key: \fBdebug_https\fP (default: \fBfalse\fP) .TP \fB\-\-disable\-notifications\fP Do not use desktop notifications in monitor mode .br Configuration file key: \fBdisable_notifications\fP (default: \fBfalse\fP) .TP \fB\-\-disable\-upload\-validation\fP Disable upload validation when uploading to OneDrive .br Configuration file key: \fBdisable_upload_validation\fP (default: \fBfalse\fP) .TP \fB\-\-display\-config\fP Display what options the client will use as currently configured \- no sync will be performed. .TP \fB\-\-display\-sync\-status\fP Display the sync status of the client \- no sync will be performed. .TP \fB\-\-download\-only\fP Only download remote changes .br Configuration file key: \fBdownload_only\fP (default: \fBfalse\fP) .TP \fB\-\-dry\-run\fP Perform a trial sync with no changes made. Can ONLY be used with --synchronize. 
Will be ignored for --monitor .br Configuration file key: \fBdry_run\fP (default: \fBfalse\fP) .TP \fB\-\-enable\-logging\fP Enable client activity to a separate log file .br Configuration file key: \fBenable_logging\fP (default: \fBfalse\fP) .TP \fB\-\-force\-http\-1.1\fP Force the use of HTTP 1.1 for all operations (DEPRECIATED) .br Configuration file key: \fBforce_http_11\fP (default: \fBfalse\fP) .TP \fB\-\-force\-http\-2\fP Force the use of HTTP/2 for all operations where applicable .br Configuration file key: \fBforce_http_2\fP (default: \fBfalse\fP) .TP \fB\-\-get\-O365\-drive\-id\fP ARG Query and return the Office 365 Drive ID for a given Office 365 SharePoint Shared Library .TP \fB\-\-get\-file\-link\fP ARG Display the file link of a synced file .TP \fB\-\-local\-first\fP Synchronize from the local directory source first, before downloading changes from OneDrive. .br Configuration file key: \fBlocal_first\fP (default: \fBfalse\fP) .TP \fB\-\-logout\fP Logout the current user .TP \fB\-\-log\-dir\fP ARG defines the directory where logging output is saved to, needs to end with a slash .br Configuration file key: \fBlog_dir\fP (default: \fB/var/log/onedrive/\fP) .TP \fB\-\-min-notify-changes\fP the minimum number of pending incoming changes necessary to trigger a desktop notification .br Configuration file key: \fBmin_notify_changes\fP (default: \fB5\fP) .TP \fB\-m \-\-monitor\fP Keep monitoring for local and remote changes .TP \fB\-\-monitor\-interval\fP ARG The number of seconds by which each sync operation is undertaken when idle under monitor mode .br Configuration file key: \fBmonitor_interval\fP (default: \fB45\fP) .TP \fB\-\-monitor\-fullscan-frequency\fP ARG Number of sync runs before performing a full local scan of the synced directory .br Configuration file key: \fBmonitor_fullscan_frequency\fP (default: \fB10\fP) .TP \fB\-\-no\-remote\-delete\fP Do not delete local file 'deletes' from OneDrive when using \fB\-\-upload\-only\fR .br Configuration file key: \fBno_remote_delete\fP (default: \fBfalse\fP) .TP \fB\-\-print\-token\fP Print the access token, useful for debugging .TP \fB\-\-resync\fP Forget the last saved state, perform a full sync .TP \fB\-\-remove\-directory\fP ARG Remove a directory on OneDrive \- no sync will be performed. .TP \fB\-\-single\-directory\fP ARG Specify a single local directory within the OneDrive root to sync. .TP \fB\-\-skip\-dot\-files\fP Skip dot files and folders from syncing .br Configuration file key: \fBskip_dotfiles\fP (default: \fBfalse\fP) .TP \fB\-\-skip\-file\fP Skip any files that match this pattern from syncing .br Configuration file key: \fBskip_file\fP (default: \fB~*|.~*|*.tmp\fP) .TP \fB\-\-skip\-symlinks\fP Skip syncing of symlinks .br Configuration file key: \fBskip_symlinks\fP (default: \fBfalse\fP) .TP \fB\-\-source\-directory\fP ARG Source directory to rename or move on OneDrive \- no sync will be performed. .TP \fB\-\-sync\-root\-files\fP Sync all files in sync_dir root when using sync_list. 
.TP \fB\-\-syncdir\fP ARG Set the directory used to sync the files that are synced .br Configuration file key: \fBsync_dir\fP (default: \fB~/OneDrive\fP) .TP \fB\-\-synchronize\fP Perform a synchronization .TP \fB\-\-upload\-only\fP Only upload to OneDrive, do not sync changes from OneDrive locally .br Configuration file key: \fBupload_only\fP (default: \fBfalse\fP) .TP \fB\-\-user\-agent\fP ARG Set the used User Agent identifier .br Configuration file key: \fBuser_agent\fP (default: don't change) .TP \fB\-v \-\-verbose\fP Print more details, useful for debugging. Given two times (or more) enables even more verbose debug statements. .TP \fB\-\-version\fP Print the version and exit .TP \fB\-h \-\-help\fP This help information. .PP .SH FEATURES State caching Real-Time file monitoring with Inotify Resumable uploads Support OneDrive for Business (part of Office 365) Shared folders (OneDrive Personal) SharePoint / Office 365 Group Drives (refer to README.Office365.md to configure) .SH CONFIGURATION You should copy the default config file into your home directory before making changes: .nf \fB mkdir\ \-p\ ~/.config/onedrive cp\ @DOCDIR@/config\ ~/.config/onedrive/config \fP .fi For the supported options see the above list of command line options for the availability of a configuration key. .PP Pattern are case insensitive. \fB*\fP and \fB?\fP wildcards characters are supported. Use \fB|\fP to separate multiple patterns. After changing the filters (\fBskip_file\fP or \fBskip_dir\fP in your configs) you must execute \fBonedrive --synchronize --resync\fP. .SH FIRST RUN After installing the application you must run it at least once from the terminal to authorize it. You will be asked to open a specific link using your web browser where you will have to login into your Microsoft Account and give the application the permission to access your files. After giving the permission, you will be redirected to a blank page. Copy the URI of the blank page into the application. .SH SYSTEMD INTEGRATION Service files are installed into user and system directories. .TP OneDrive service running as root user To enable this mode, run as root user .nf \fB systemctl enable onedrive systemctl start onedrive \fP .fi .TP OneDrive service running as root user for a non-root user This mode allows starting the OneDrive service automatically with system start for multiple users. For each \fB\fP run: .nf \fB systemctl enable onedrive@ systemctl start onedrive@ \fP .fi .TP OneDrive service running as non-root user In this mode the service will be started when the user logs in. Run as user .nf \fB systemctl --user enable onedrive systemctl --user start onedrive \fP .fi .SH LOGGING OUTPUT When running onedrive all actions can be logged to a separate log file. This can be enabled by using the \fB--enable-logging\fP flag. By default, log files will be written to \fB/var/log/onedrive\fP. All logfiles will be in the format of \fB%username%.onedrive.log\fP, where \fB%username%\fP represents the user who ran the client. .SH NOTIFICATIONS If OneDrive has been compiled with support for notifications, a running \fBonedrive\fP in monitor mode will send notifications about initialization and errors via libnotify to the dbus. Note that this does not work if \fBonedrive\fP is started as root for a user via the \fBonedrive@\fP service. 
.SH SEE ALSO Further examples and documentation is available in \f[C]README.md\f[] \f[C]README.Office365.md\f[] onedrive-2.3.13/src/000077500000000000000000000000001360252424000142005ustar00rootroot00000000000000onedrive-2.3.13/src/config.d000066400000000000000000000376601360252424000156260ustar00rootroot00000000000000import core.stdc.stdlib: EXIT_SUCCESS, EXIT_FAILURE, exit; import std.file, std.string, std.regex, std.stdio, std.process, std.algorithm.searching, std.getopt, std.conv; import std.algorithm.sorting: sort; import selective; static import log; final class Config { public string refreshTokenFilePath; public string deltaLinkFilePath; public string databaseFilePath; public string databaseFilePathDryRun; public string uploadStateFilePath; public string syncListFilePath; public string homePath; public string configDirName; public string defaultSyncDir = "~/OneDrive"; public string defaultSkipFile = "~*|.~*|*.tmp"; public string defaultSkipDir = ""; public string configFileSyncDir; public string configFileSkipFile; public string configFileSkipDir; private string userConfigFilePath; // hashmap for the values found in the user config file // ARGGGG D is stupid and cannot make hashmap initializations!!! // private string[string] foobar = [ "aa": "bb" ] does NOT work!!! private string[string] stringValues; private bool[string] boolValues; private long[string] longValues; this(string confdirOption) { // default configuration stringValues["sync_dir"] = defaultSyncDir; stringValues["skip_file"] = defaultSkipFile; stringValues["skip_dir"] = defaultSkipDir; stringValues["log_dir"] = "/var/log/onedrive/"; stringValues["drive_id"] = ""; stringValues["user_agent"] = ""; boolValues["upload_only"] = false; boolValues["check_nomount"] = false; boolValues["check_nosync"] = false; boolValues["download_only"] = false; boolValues["disable_notifications"] = false; boolValues["disable_upload_validation"] = false; boolValues["enable_logging"] = false; boolValues["force_http_11"] = false; boolValues["force_http_2"] = false; boolValues["local_first"] = false; boolValues["no_remote_delete"] = false; boolValues["skip_symlinks"] = false; boolValues["debug_https"] = false; boolValues["skip_dotfiles"] = false; boolValues["dry_run"] = false; boolValues["sync_root_files"] = false; longValues["verbose"] = log.verbose; // might be initialized by the first getopt call! longValues["monitor_interval"] = 45, longValues["skip_size"] = 0, longValues["min_notify_changes"] = 5; longValues["monitor_log_frequency"] = 5; // Number of n sync runs before performing a full local scan of sync_dir // By default 10 which means every ~7.5 minutes a full disk scan of sync_dir will occur longValues["monitor_fullscan_frequency"] = 10; // Determine the users home directory. 
// Need to avoid using ~ here as expandTilde() below does not interpret correctly when running under init.d or systemd scripts // Check for HOME environment variable if (environment.get("HOME") != ""){ // Use HOME environment variable log.vdebug("homePath: HOME environment variable set"); homePath = environment.get("HOME"); } else { if ((environment.get("SHELL") == "") && (environment.get("USER") == "")){ // No shell is set or username - observed case when running as systemd service under CentOS 7.x log.vdebug("homePath: WARNING - no HOME environment variable set"); log.vdebug("homePath: WARNING - no SHELL environment variable set"); log.vdebug("homePath: WARNING - no USER environment variable set"); homePath = "/root"; } else { // A shell & valid user is set, but no HOME is set, use ~ which can be expanded log.vdebug("homePath: WARNING - no HOME environment variable set"); homePath = "~"; } } // Output homePath calculation log.vdebug("homePath: ", homePath); // Determine the correct configuration directory to use string configDirBase; if (confdirOption != "") { // A CLI 'confdir' was passed in log.vdebug("configDirName: CLI override to set configDirName to: ", confdirOption); if (canFind(confdirOption,"~")) { // A ~ was found log.vdebug("configDirName: A '~' was found in configDirName, using the calculated 'homePath' to replace '~'"); configDirName = homePath ~ strip(confdirOption,"~","~"); } else { configDirName = confdirOption; } } else { // Determine the base directory relative to which user specific configuration files should be stored. if (environment.get("XDG_CONFIG_HOME") != ""){ log.vdebug("configDirBase: XDG_CONFIG_HOME environment variable set"); configDirBase = environment.get("XDG_CONFIG_HOME"); } else { // XDG_CONFIG_HOME does not exist on systems where X11 is not present - ie - headless systems / servers log.vdebug("configDirBase: WARNING - no XDG_CONFIG_HOME environment variable set"); configDirBase = homePath ~ "/.config"; } // Output configDirBase calculation log.vdebug("configDirBase: ", configDirBase); // Set the default application configuration directory log.vdebug("configDirName: Configuring application to use default config path"); // configDirBase contains the correct path so we do not need to check for presence of '~' configDirName = configDirBase ~ "/onedrive"; } log.vlog("Using Config Dir: ", configDirName); if (!exists(configDirName)) mkdirRecurse(configDirName); refreshTokenFilePath = configDirName ~ "/refresh_token"; deltaLinkFilePath = configDirName ~ "/delta_link"; databaseFilePath = configDirName ~ "/items.sqlite3"; databaseFilePathDryRun = configDirName ~ "/items-dryrun.sqlite3"; uploadStateFilePath = configDirName ~ "/resume_upload"; userConfigFilePath = configDirName ~ "/config"; syncListFilePath = configDirName ~ "/sync_list"; } bool initialize() { if (!load(userConfigFilePath)) { // What was the reason for failure? 
if (!exists(userConfigFilePath)) { log.vlog("No config file found, using application defaults"); return true; } else { log.log("Configuration file has errors - please check your configuration"); return false; } } return true; } void update_from_args(string[] args) { // Add additional options that are NOT configurable via config file stringValues["create_directory"] = ""; stringValues["destination_directory"] = ""; stringValues["get_file_link"] = ""; stringValues["get_o365_drive_id"] = ""; stringValues["remove_directory"] = ""; stringValues["single_directory"] = ""; stringValues["source_directory"] = ""; stringValues["auth_files"] = ""; boolValues["display_config"] = false; boolValues["display_sync_status"] = false; boolValues["resync"] = false; boolValues["print_token"] = false; boolValues["logout"] = false; boolValues["monitor"] = false; boolValues["synchronize"] = false; // Application Startup option validation try { string tmpStr; bool tmpBol; long tmpVerb; auto opt = getopt( args, std.getopt.config.bundling, std.getopt.config.caseSensitive, "auth-files", "Perform authentication not via interactive dialog but via files read/writes to these files.", &stringValues["auth_files"], "check-for-nomount", "Check for the presence of .nosync in the syncdir root. If found, do not perform sync.", &boolValues["check_nomount"], "check-for-nosync", "Check for the presence of .nosync in each directory. If found, skip directory from sync.", &boolValues["check_nosync"], "create-directory", "Create a directory on OneDrive - no sync will be performed.", &stringValues["create_directory"], "debug-https", "Debug OneDrive HTTPS communication.", &boolValues["debug_https"], "destination-directory", "Destination directory for renamed or move on OneDrive - no sync will be performed.", &stringValues["destination_directory"], "disable-notifications", "Do not use desktop notifications in monitor mode.", &boolValues["disable_notifications"], "disable-upload-validation", "Disable upload validation when uploading to OneDrive", &boolValues["disable_upload_validation"], "display-config", "Display what options the client will use as currently configured - no sync will be performed.", &boolValues["display_config"], "display-sync-status", "Display the sync status of the client - no sync will be performed.", &boolValues["display_sync_status"], "download-only", "Only download remote changes", &boolValues["download_only"], "dry-run", "Perform a trial sync with no changes made", &boolValues["dry_run"], "enable-logging", "Enable client activity to a separate log file", &boolValues["enable_logging"], "force-http-1.1", "Force the use of HTTP/1.1 for all operations (DEPRECIATED)", &boolValues["force_http_11"], "force-http-2", "Force the use of HTTP/2 for all operations where applicable", &boolValues["force_http_2"], "get-file-link", "Display the file link of a synced file", &stringValues["get_file_link"], "get-O365-drive-id", "Query and return the Office 365 Drive ID for a given Office 365 SharePoint Shared Library", &stringValues["get_o365_drive_id"], "local-first", "Synchronize from the local directory source first, before downloading changes from OneDrive.", &boolValues["local_first"], "log-dir", "Directory where logging output is saved to, needs to end with a slash.", &stringValues["log_dir"], "logout", "Logout the current user", &boolValues["logout"], "min-notify-changes", "Minimum number of pending incoming changes necessary to trigger a desktop notification", &longValues["min_notify_changes"], "monitor|m", "Keep monitoring 
for local and remote changes", &boolValues["monitor"], "monitor-interval", "Number of seconds by which each sync operation is undertaken when idle under monitor mode.", &longValues["monitor_interval"], "monitor-fullscan-frequency", "Number of sync runs before performing a full local scan of the synced directory", &longValues["monitor_fullscan_frequency"], "monitor-log-frequency", "Frequency of logging in monitor mode", &longValues["monitor_log_frequency"], "no-remote-delete", "Do not delete local file 'deletes' from OneDrive when using --upload-only", &boolValues["no_remote_delete"], "print-token", "Print the access token, useful for debugging", &boolValues["print_token"], "resync", "Forget the last saved state, perform a full sync", &boolValues["resync"], "remove-directory", "Remove a directory on OneDrive - no sync will be performed.", &stringValues["remove_directory"], "single-directory", "Specify a single local directory within the OneDrive root to sync.", &stringValues["single_directory"], "skip-dot-files", "Skip dot files and folders from syncing", &boolValues["skip_dotfiles"], "skip-file", "Skip any files that match this pattern from syncing", &stringValues["skip_file"], "skip-dir", "Skip any directories that match this pattern from syncing", &stringValues["skip_dir"], "skip-size", "Skip new files larger than this size (in MB)", &longValues["skip_size"], "skip-symlinks", "Skip syncing of symlinks", &boolValues["skip_symlinks"], "source-directory", "Source directory to rename or move on OneDrive - no sync will be performed.", &stringValues["source_directory"], "syncdir", "Specify the local directory used for synchronization to OneDrive", &stringValues["sync_dir"], "synchronize", "Perform a synchronization", &boolValues["synchronize"], "sync-root-files", "Sync all files in sync_dir root when using sync_list.", &boolValues["sync_root_files"], "upload-only", "Only upload to OneDrive, do not sync changes from OneDrive locally", &boolValues["upload_only"], "user-agent", "Specify a User Agent string to the http client", &stringValues["user_agent"], // duplicated from main.d to get full help output! 
"confdir", "Set the directory used to store the configuration files", &tmpStr, "verbose|v+", "Print more details, useful for debugging (repeat for extra debugging)", &tmpVerb, "version", "Print the version and exit", &tmpBol ); if (opt.helpWanted) { outputLongHelp(opt.options); exit(EXIT_SUCCESS); } } catch (GetOptException e) { log.error(e.msg); log.error("Try 'onedrive -h' for more information"); exit(EXIT_FAILURE); } catch (Exception e) { // error log.error(e.msg); log.error("Try 'onedrive -h' for more information"); exit(EXIT_FAILURE); } } string getValueString(string key) { auto p = key in stringValues; if (p) { return *p; } else { throw new Exception("Missing config value: " ~ key); } } long getValueLong(string key) { auto p = key in longValues; if (p) { return *p; } else { throw new Exception("Missing config value: " ~ key); } } bool getValueBool(string key) { auto p = key in boolValues; if (p) { return *p; } else { throw new Exception("Missing config value: " ~ key); } } void setValueBool(string key, bool value) { boolValues[key] = value; } void setValueString(string key, string value) { stringValues[key] = value; } void setValueLong(string key, long value) { longValues[key] = value; } private bool load(string filename) { scope(failure) return false; auto file = File(filename, "r"); auto r = regex(`^(\w+)\s*=\s*"(.*)"\s*$`); foreach (line; file.byLine()) { line = stripLeft(line); if (line.length == 0 || line[0] == ';' || line[0] == '#') continue; auto c = line.matchFirst(r); if (!c.empty) { c.popFront(); // skip the whole match string key = c.front.dup; auto p = key in boolValues; if (p) { c.popFront(); // only accept "true" as true value. TODO Should we support other formats? setValueBool(key, c.front.dup == "true" ? true : false); } else { auto pp = key in stringValues; if (pp) { c.popFront(); setValueString(key, c.front.dup); // detect need for --resync for these: // --syncdir ARG // --skip-file ARG // --skip-dir ARG if (key == "sync_dir") configFileSyncDir = c.front.dup; if (key == "skip_file") configFileSkipFile = c.front.dup; if (key == "skip_dir") configFileSkipDir = c.front.dup; } else { auto ppp = key in longValues; if (ppp) { c.popFront(); setValueLong(key, to!long(c.front.dup)); } else { log.log("Unknown key in config file: ", key); return false; } } } } else { log.log("Malformed config line: ", line); return false; } } return true; } } void outputLongHelp(Option[] opt) { auto argsNeedingOptions = [ "--confdir", "--create-directory", "--destination-directory", "--get-O365-drive-id", "--log-dir", "--min-notify-changes", "--monitor-interval", "--monitor-log-frequency", "--monitor-fullscan-frequency", "--remove-directory", "--single-directory", "--skip-file", "--source-directory", "--syncdir", "--user-agent" ]; writeln(`OneDrive - a client for OneDrive Cloud Services Usage: onedrive [options] --synchronize Do a one time synchronization onedrive [options] --monitor Monitor filesystem and sync regularly onedrive [options] --display-config Display the currently used configuration onedrive [options] --display-sync-status Query OneDrive service and report on pending changes onedrive -h | --help Show this help screen onedrive --version Show version Options: `); foreach (it; opt.sort!("a.optLong < b.optLong")) { writefln(" %s%s%s%s\n %s", it.optLong, it.optShort == "" ? "" : " " ~ it.optShort, argsNeedingOptions.canFind(it.optLong) ? " ARG" : "", it.required ? 
" (required)" : "", it.help); } } unittest { auto cfg = new Config(""); cfg.load("config"); assert(cfg.getValueString("sync_dir") == "~/OneDrive"); } onedrive-2.3.13/src/itemdb.d000066400000000000000000000267201360252424000156200ustar00rootroot00000000000000import std.datetime; import std.exception; import std.path; import std.string; import core.stdc.stdlib; import sqlite; static import log; enum ItemType { file, dir, remote } struct Item { string driveId; string id; string name; ItemType type; string eTag; string cTag; SysTime mtime; string parentId; string crc32Hash; string sha1Hash; string quickXorHash; string remoteDriveId; string remoteId; } final class ItemDatabase { // increment this for every change in the db schema immutable int itemDatabaseVersion = 9; Database db; string insertItemStmt; string updateItemStmt; string selectItemByIdStmt; string selectItemByParentIdStmt; string deleteItemByIdStmt; this(const(char)[] filename) { db = Database(filename); int dbVersion; try { dbVersion = db.getVersion(); } catch (SqliteException e) { // An error was generated - what was the error? log.error("\nAn internal database error occurred: " ~ e.msg ~ "\n"); exit(-1); } if (dbVersion == 0) { createTable(); } else if (db.getVersion() != itemDatabaseVersion) { log.log("The item database is incompatible, re-creating database table structures"); db.exec("DROP TABLE item"); createTable(); } // Set the enforcement of foreign key constraints. // https://www.sqlite.org/pragma.html#pragma_foreign_keys // PRAGMA foreign_keys = boolean; db.exec("PRAGMA foreign_keys = TRUE"); // Set the recursive trigger capability // https://www.sqlite.org/pragma.html#pragma_recursive_triggers // PRAGMA recursive_triggers = boolean; db.exec("PRAGMA recursive_triggers = TRUE"); // Set the journal mode for databases associated with the current connection // https://www.sqlite.org/pragma.html#pragma_journal_mode db.exec("PRAGMA journal_mode = WAL"); // Automatic indexing is enabled by default as of version 3.7.17 // https://www.sqlite.org/pragma.html#pragma_automatic_index // PRAGMA automatic_index = boolean; db.exec("PRAGMA automatic_index = FALSE"); // Tell SQLite to store temporary tables in memory. This will speed up many read operations that rely on temporary tables, indices, and views. // https://www.sqlite.org/pragma.html#pragma_temp_store db.exec("PRAGMA temp_store = MEMORY"); // Tell SQlite to cleanup database table size // https://www.sqlite.org/pragma.html#pragma_auto_vacuum // PRAGMA schema.auto_vacuum = 0 | NONE | 1 | FULL | 2 | INCREMENTAL; db.exec("PRAGMA auto_vacuum = FULL"); insertItemStmt = " INSERT OR REPLACE INTO item (driveId, id, name, type, eTag, cTag, mtime, parentId, crc32Hash, sha1Hash, quickXorHash, remoteDriveId, remoteId) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13) "; updateItemStmt = " UPDATE item SET name = ?3, type = ?4, eTag = ?5, cTag = ?6, mtime = ?7, parentId = ?8, crc32Hash = ?9, sha1Hash = ?10, quickXorHash = ?11, remoteDriveId = ?12, remoteId = ?13 WHERE driveId = ?1 AND id = ?2 "; selectItemByIdStmt = " SELECT * FROM item WHERE driveId = ?1 AND id = ?2 "; selectItemByParentIdStmt = "SELECT * FROM item WHERE driveId = ? AND parentId = ?"; deleteItemByIdStmt = "DELETE FROM item WHERE driveId = ? 
AND id = ?"; } void createTable() { db.exec("CREATE TABLE item ( driveId TEXT NOT NULL, id TEXT NOT NULL, name TEXT NOT NULL, type TEXT NOT NULL, eTag TEXT, cTag TEXT, mtime TEXT NOT NULL, parentId TEXT, crc32Hash TEXT, sha1Hash TEXT, quickXorHash TEXT, remoteDriveId TEXT, remoteId TEXT, deltaLink TEXT, PRIMARY KEY (driveId, id), FOREIGN KEY (driveId, parentId) REFERENCES item (driveId, id) ON DELETE CASCADE ON UPDATE RESTRICT )"); db.exec("CREATE INDEX name_idx ON item (name)"); db.exec("CREATE INDEX remote_idx ON item (remoteDriveId, remoteId)"); db.exec("CREATE INDEX item_children_idx ON item (driveId, parentId)"); db.exec("CREATE INDEX selectByPath_idx ON item (name, driveId, parentId)"); db.setVersion(itemDatabaseVersion); } void insert(const ref Item item) { auto p = db.prepare(insertItemStmt); bindItem(item, p); p.exec(); } void update(const ref Item item) { auto p = db.prepare(updateItemStmt); bindItem(item, p); p.exec(); } void dump_open_statements() { db.dump_open_statements(); } int db_checkpoint() { return db.db_checkpoint(); } void upsert(const ref Item item) { auto s = db.prepare("SELECT COUNT(*) FROM item WHERE driveId = ? AND id = ?"); s.bind(1, item.driveId); s.bind(2, item.id); auto r = s.exec(); Statement stmt; if (r.front[0] == "0") stmt = db.prepare(insertItemStmt); else stmt = db.prepare(updateItemStmt); bindItem(item, stmt); stmt.exec(); } Item[] selectChildren(const(char)[] driveId, const(char)[] id) { auto p = db.prepare(selectItemByParentIdStmt); p.bind(1, driveId); p.bind(2, id); auto res = p.exec(); Item[] items; while (!res.empty) { items ~= buildItem(res); res.step(); } return items; } bool selectById(const(char)[] driveId, const(char)[] id, out Item item) { auto p = db.prepare(selectItemByIdStmt); p.bind(1, driveId); p.bind(2, id); auto r = p.exec(); if (!r.empty) { item = buildItem(r); return true; } return false; } // returns true if an item id is in the database bool idInLocalDatabase(const(string) driveId, const(string)id) { auto p = db.prepare(selectItemByIdStmt); p.bind(1, driveId); p.bind(2, id); auto r = p.exec(); if (!r.empty) { return true; } return false; } // returns the item with the given path // the path is relative to the sync directory ex: "./Music/Turbo Killer.mp3" bool selectByPath(const(char)[] path, string rootDriveId, out Item item) { Item currItem = { driveId: rootDriveId }; // Issue https://github.com/abraunegg/onedrive/issues/578 if (startsWith(path, "./") || path == ".") { // Need to remove the . from the path prefix path = "root/" ~ path.chompPrefix("."); } else { // Leave path as it is path = "root/" ~ path; } auto s = db.prepare("SELECT * FROM item WHERE name = ?1 AND driveId IS ?2 AND parentId IS ?3"); foreach (name; pathSplitter(path)) { s.bind(1, name); s.bind(2, currItem.driveId); s.bind(3, currItem.id); auto r = s.exec(); if (r.empty) return false; currItem = buildItem(r); // if the item is of type remote substitute it with the child if (currItem.type == ItemType.remote) { Item child; if (selectById(currItem.remoteDriveId, currItem.remoteId, child)) { assert(child.type != ItemType.remote, "The type of the child cannot be remote"); currItem = child; } } } item = currItem; return true; } // same as selectByPath() but it does not traverse remote folders bool selectByPathNoRemote(const(char)[] path, string rootDriveId, out Item item) { Item currItem = { driveId: rootDriveId }; // Issue https://github.com/abraunegg/onedrive/issues/578 if (startsWith(path, "./") || path == ".") { // Need to remove the . 
from the path prefix path = "root/" ~ path.chompPrefix("."); } else { // Leave path as it is path = "root/" ~ path; } auto s = db.prepare("SELECT * FROM item WHERE name IS ?1 AND driveId IS ?2 AND parentId IS ?3"); foreach (name; pathSplitter(path)) { s.bind(1, name); s.bind(2, currItem.driveId); s.bind(3, currItem.id); auto r = s.exec(); if (r.empty) return false; currItem = buildItem(r); } item = currItem; return true; } void deleteById(const(char)[] driveId, const(char)[] id) { auto p = db.prepare(deleteItemByIdStmt); p.bind(1, driveId); p.bind(2, id); p.exec(); } private void bindItem(const ref Item item, ref Statement stmt) { with (stmt) with (item) { bind(1, driveId); bind(2, id); bind(3, name); string typeStr = null; final switch (type) with (ItemType) { case file: typeStr = "file"; break; case dir: typeStr = "dir"; break; case remote: typeStr = "remote"; break; } bind(4, typeStr); bind(5, eTag); bind(6, cTag); bind(7, mtime.toISOExtString()); bind(8, parentId); bind(9, crc32Hash); bind(10, sha1Hash); bind(11, quickXorHash); bind(12, remoteDriveId); bind(13, remoteId); } } private Item buildItem(Statement.Result result) { assert(!result.empty, "The result must not be empty"); assert(result.front.length == 14, "The result must have 14 columns"); Item item = { driveId: result.front[0].dup, id: result.front[1].dup, name: result.front[2].dup, eTag: result.front[4].dup, cTag: result.front[5].dup, mtime: SysTime.fromISOExtString(result.front[6]), parentId: result.front[7].dup, crc32Hash: result.front[8].dup, sha1Hash: result.front[9].dup, quickXorHash: result.front[10].dup, remoteDriveId: result.front[11].dup, remoteId: result.front[12].dup }; switch (result.front[3]) { case "file": item.type = ItemType.file; break; case "dir": item.type = ItemType.dir; break; case "remote": item.type = ItemType.remote; break; default: assert(0, "Invalid item type"); } return item; } // computes the path of the given item id // the path is relative to the sync directory ex: "Music/Turbo Killer.mp3" // the trailing slash is not added even if the item is a directory string computePath(const(char)[] driveId, const(char)[] id) { assert(driveId && id); string path; Item item; auto s = db.prepare("SELECT * FROM item WHERE driveId = ?1 AND id = ?2"); auto s2 = db.prepare("SELECT driveId, id FROM item WHERE remoteDriveId = ?1 AND remoteId = ?2"); while (true) { s.bind(1, driveId); s.bind(2, id); auto r = s.exec(); if (!r.empty) { item = buildItem(r); if (item.type == ItemType.remote) { // substitute the last name with the current ptrdiff_t idx = indexOf(path, '/'); path = idx >= 0 ? item.name ~ path[idx .. $] : item.name; } else { if (path) path = item.name ~ "/" ~ path; else path = item.name; } id = item.parentId; } else { if (id == null) { // check for remoteItem s2.bind(1, item.driveId); s2.bind(2, item.id); auto r2 = s2.exec(); if (r2.empty) { // root reached assert(path.length >= 4); // remove "root" if (path.length >= 5) path = path[5 .. $]; else path = path[4 .. 
$]; // special case of computing the path of the root itself if (path.length == 0) path = "."; break; } else { // remote folder driveId = r2.front[0].dup; id = r2.front[1].dup; } } else { // broken tree assert(0); } } } return path; } Item[] selectRemoteItems() { Item[] items; auto stmt = db.prepare("SELECT * FROM item WHERE remoteDriveId IS NOT NULL"); auto res = stmt.exec(); while (!res.empty) { items ~= buildItem(res); res.step(); } return items; } string getDeltaLink(const(char)[] driveId, const(char)[] id) { assert(driveId && id); auto stmt = db.prepare("SELECT deltaLink FROM item WHERE driveId = ?1 AND id = ?2"); stmt.bind(1, driveId); stmt.bind(2, id); auto res = stmt.exec(); if (res.empty) return null; return res.front[0].dup; } void setDeltaLink(const(char)[] driveId, const(char)[] id, const(char)[] deltaLink) { assert(driveId && id); assert(deltaLink); auto stmt = db.prepare("UPDATE item SET deltaLink = ?3 WHERE driveId = ?1 AND id = ?2"); stmt.bind(1, driveId); stmt.bind(2, id); stmt.bind(3, deltaLink); stmt.exec(); } } onedrive-2.3.13/src/log.d000066400000000000000000000100721360252424000151260ustar00rootroot00000000000000import std.stdio; import std.file; import std.datetime; import std.process; import std.conv; import core.sys.posix.pwd, core.sys.posix.unistd, core.stdc.string : strlen; import std.algorithm : splitter; version(Notifications) { import dnotify; } // enable verbose logging long verbose; bool writeLogFile = false; private bool doNotifications; // shared string variable for username string username; string logFilePath; void init(string logDir) { writeLogFile = true; username = getUserName(); logFilePath = logDir; if (!exists(logFilePath)){ // logfile path does not exist try { mkdirRecurse(logFilePath); } catch (std.file.FileException e) { // we got an error .. 
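getDeltaLink() and setDeltaLink() above persist the delta token per (driveId, id) pair so the next sync can request only the changes made since the previous run. A hedged usage sketch follows; the database path and identifiers are invented for illustration, and the module name is assumed from the file name (itemdb.d).

```d
import itemdb;   // assumption: module name taken from the file name above

void main()
{
    // hypothetical path and identifiers, for illustration only
    auto db = new ItemDatabase("/home/user/.config/onedrive/items.sqlite3");
    string driveId = "exampleDriveId";
    string rootId  = "exampleRootItemId";

    // null on the first run, i.e. before any delta link has been stored
    string previousLink = db.getDeltaLink(driveId, rootId);

    // ... query /delta using previousLink, then persist the new token ...
    db.setDeltaLink(driveId, rootId, "https://example.invalid/new-delta-link");
}
```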
writeln("\nUnable to access ", logFilePath); writeln("Please manually create '",logFilePath, "' and set appropriate permissions to allow write access"); writeln("The requested client activity log will instead be located in the users home directory\n"); } } } void setNotifications(bool value) { version(Notifications) { // if we try to enable notifications, check for server availability // and disable in case dbus server is not reachable if (value) { auto serverAvailable = dnotify.check_availability(); if (!serverAvailable) { log("Notification (dbus) server not available, disabling"); value = false; } } } doNotifications = value; } void log(T...)(T args) { writeln(args); if(writeLogFile){ // Write to log file logfileWriteLine(args); } } void logAndNotify(T...)(T args) { notify(args); log(args); } void fileOnly(T...)(T args) { if(writeLogFile){ // Write to log file logfileWriteLine(args); } } void vlog(T...)(T args) { if (verbose >= 1) { writeln(args); if(writeLogFile){ // Write to log file logfileWriteLine(args); } } } void vdebug(T...)(T args) { if (verbose >= 2) { writeln("[DEBUG] ", args); if(writeLogFile){ // Write to log file logfileWriteLine("[DEBUG] ", args); } } } void vdebugUpload(T...)(T args) { if (verbose >= 2) { writeln("\n[DEBUG] ", args); if(writeLogFile){ // Write to log file logfileWriteLine("\n[DEBUG] ", args); } } } void error(T...)(T args) { stderr.writeln(args); if(writeLogFile){ // Write to log file logfileWriteLine(args); } } void errorAndNotify(T...)(T args) { notify(args); error(args); } void notify(T...)(T args) { version(Notifications) { if (doNotifications) { string result; foreach (index, arg; args) { result ~= to!string(arg); if (index != args.length - 1) result ~= " "; } auto n = new Notification("OneDrive", result, "IGNORED"); try { n.show(); // Sent message to notification daemon if (verbose >= 2) { writeln("[DEBUG] Sent notification to notification service. If notification is not displayed, check dbus or notification-daemon for errors"); } } catch (Throwable e) { vlog("Got exception from showing notification: ", e); } } } } private void logfileWriteLine(T...)(T args) { // Write to log file string logFileName = .logFilePath ~ .username ~ ".onedrive.log"; auto currentTime = Clock.currTime(); auto timeString = currentTime.toString(); File logFile; // Resolve: std.exception.ErrnoException@std/stdio.d(423): Cannot open file `/var/log/onedrive/xxxxx.onedrive.log' in mode `a' (Permission denied) try { logFile = File(logFileName, "a"); } catch (std.exception.ErrnoException e) { // We cannot open the log file in logFilePath location for writing // The user is not part of the standard 'users' group (GID 100) // Change logfile to ~/onedrive.log putting the log file in the users home directory string homePath = environment.get("HOME"); string logFileNameAlternate = homePath ~ "/onedrive.log"; logFile = File(logFileNameAlternate, "a"); } // Write to the log file logFile.writeln(timeString, " ", args); logFile.close(); } private string getUserName() { auto pw = getpwuid(getuid); auto uinfo = pw.pw_gecos[0 .. strlen(pw.pw_gecos)].splitter(','); if (!uinfo.empty && uinfo.front.length){ return uinfo.front.idup; } else { // Unknown user? 
return "unknown"; } } onedrive-2.3.13/src/main.d000066400000000000000000001050361360252424000152760ustar00rootroot00000000000000import core.stdc.stdlib: EXIT_SUCCESS, EXIT_FAILURE, exit; import core.memory, core.time, core.thread; import std.getopt, std.file, std.path, std.process, std.stdio, std.conv, std.algorithm.searching, std.string, std.regex; import config, itemdb, monitor, onedrive, selective, sync, util; import std.net.curl: CurlException; import core.stdc.signal; import std.traits; static import log; OneDriveApi oneDrive; ItemDatabase itemDb; const int EXIT_UNAUTHORIZED = 3; enum MONITOR_LOG_SILENT = 2; enum MONITOR_LOG_QUIET = 1; enum LOG_NORMAL = 0; int main(string[] args) { // Disable buffering on stdout stdout.setvbuf(0, _IONBF); // configuration directory string confdirOption; try { // print the version and exit bool printVersion = false; auto opt = getopt( args, std.getopt.config.passThrough, std.getopt.config.bundling, std.getopt.config.caseSensitive, "confdir", "Set the directory used to store the configuration files", &confdirOption, "verbose|v+", "Print more details, useful for debugging (repeat for extra debugging)", &log.verbose, "version", "Print the version and exit", &printVersion ); if (opt.helpWanted) { args ~= "--help"; } if (printVersion) { std.stdio.write("onedrive ", import("version")); return EXIT_SUCCESS; } } catch (GetOptException e) { log.error(e.msg); log.error("Try 'onedrive -h' for more information"); return EXIT_FAILURE; } catch (Exception e) { // error log.error(e.msg); log.error("Try 'onedrive -h' for more information"); return EXIT_FAILURE; } // load configuration file if available auto cfg = new config.Config(confdirOption); if (!cfg.initialize()) { // There was an error loading the configuration // Error message already printed return EXIT_FAILURE; } // update configuration from command line args cfg.update_from_args(args); // Has any of our configuration that would require a --resync been changed? // 1. sync_list file modification // 2. config file modification - but only if sync_dir, skip_dir, skip_file or drive_id was modified // 3. CLI input overriding configured config file option string currentConfigHash; string currentSyncListHash; string previousConfigHash; string previousSyncListHash; string configHashFile = cfg.configDirName ~ "/.config.hash"; string syncListHashFile = cfg.configDirName ~ "/.sync_list.hash"; string configBackupFile = cfg.configDirName ~ "/.config.backup"; bool configOptionsDifferent = false; bool syncListConfigured = false; bool syncListDifferent = false; bool syncDirDifferent = false; bool skipFileDifferent = false; bool skipDirDifferent = false; if ((exists(cfg.configDirName ~ "/config")) && (!exists(configHashFile))) { // Hash of config file needs to be created std.file.write(configHashFile, computeQuickXorHash(cfg.configDirName ~ "/config")); } if ((exists(cfg.configDirName ~ "/sync_list")) && (!exists(syncListHashFile))) { // Hash of sync_list file needs to be created std.file.write(syncListHashFile, computeQuickXorHash(cfg.configDirName ~ "/sync_list")); } // If hash files exist, but config files do not ... 
remove the hash, but only if --resync was issued as now the application will use 'defaults' which 'may' be different if ((!exists(cfg.configDirName ~ "/config")) && (exists(configHashFile))) { // if --resync safe remove config.hash and config.backup if (cfg.getValueBool("resync")) { safeRemove(configHashFile); safeRemove(configBackupFile); } } if ((!exists(cfg.configDirName ~ "/sync_list")) && (exists(syncListHashFile))) { // if --resync safe remove sync_list.hash if (cfg.getValueBool("resync")) safeRemove(syncListHashFile); } // Read config hashes if they exist if (exists(cfg.configDirName ~ "/config")) currentConfigHash = computeQuickXorHash(cfg.configDirName ~ "/config"); if (exists(cfg.configDirName ~ "/sync_list")) currentSyncListHash = computeQuickXorHash(cfg.configDirName ~ "/sync_list"); if (exists(configHashFile)) previousConfigHash = readText(configHashFile); if (exists(syncListHashFile)) previousSyncListHash = readText(syncListHashFile); // Was sync_list updated? if (currentSyncListHash != previousSyncListHash) { // Debugging output to assist what changed log.vdebug("sync_list file has been updated, --resync needed"); syncListDifferent = true; } // Was config updated? if (currentConfigHash != previousConfigHash) { // config file was updated, however we only want to trigger a --resync requirement if sync_dir, skip_dir, skip_file or drive_id was modified log.vdebug("config file has been updated, checking if --resync needed"); if (exists(configBackupFile)) { // check backup config what has changed for these configuration options if anything // # sync_dir = "~/OneDrive" // # skip_file = "~*|.~*|*.tmp" // # skip_dir = "" // # drive_id = "" string[string] stringValues; stringValues["sync_dir"] = ""; stringValues["skip_file"] = ""; stringValues["skip_dir"] = ""; stringValues["drive_id"] = ""; auto file = File(configBackupFile, "r"); auto r = regex(`^(\w+)\s*=\s*"(.*)"\s*$`); foreach (line; file.byLine()) { line = stripLeft(line); if (line.length == 0 || line[0] == ';' || line[0] == '#') continue; auto c = line.matchFirst(r); if (!c.empty) { c.popFront(); // skip the whole match string key = c.front.dup; auto p = key in stringValues; if (p) { c.popFront(); // compare this key if ((key == "sync_dir") && (c.front.dup != cfg.getValueString("sync_dir"))) { log.vdebug(key, " was modified since the last time the application was successfully run, --resync needed"); configOptionsDifferent = true; } if ((key == "skip_file") && (c.front.dup != cfg.getValueString("skip_file"))){ log.vdebug(key, " was modified since the last time the application was successfully run, --resync needed"); configOptionsDifferent = true; } if ((key == "skip_dir") && (c.front.dup != cfg.getValueString("skip_dir"))){ log.vdebug(key, " was modified since the last time the application was successfully run, --resync needed"); configOptionsDifferent = true; } if ((key == "drive_id") && (c.front.dup != cfg.getValueString("drive_id"))){ log.vdebug(key, " was modified since the last time the application was successfully run, --resync needed"); configOptionsDifferent = true; } } } } } else { // no backup to check log.vdebug("WARNING: no backup config file was found, unable to validate if any changes made"); } // If there was a backup, any modified values we need to worry about would been detected if (!cfg.getValueBool("display_config")) { // we are not testing the configuration if (!configOptionsDifferent) { // no options are different if (!cfg.getValueBool("dry_run")) { // we are not in a dry-run scenario // update config hash 
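The logic above detects configuration drift by hashing the config and sync_list files with computeQuickXorHash() and comparing the result against the hash stored from the last successful run. A condensed, hypothetical helper expressing that check (assuming computeQuickXorHash comes from the util module, per the import list at the top of main.d) might look like:

```d
import std.file : exists, readText;
import util : computeQuickXorHash;   // assumption: helper provided by the util module

// true when the watched file exists and its hash no longer matches the stored one
bool fileChangedSinceLastRun(string watchedFile, string hashFile)
{
    if (!exists(watchedFile))
        return false;
    string currentHash  = computeQuickXorHash(watchedFile);
    string previousHash = exists(hashFile) ? readText(hashFile) : "";
    return currentHash != previousHash;
}
```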
log.vdebug("updating config hash as it is out of date"); std.file.write(configHashFile, computeQuickXorHash(cfg.configDirName ~ "/config")); // create backup copy of current config file log.vdebug("making backup of config file as it is out of date"); std.file.copy(cfg.configDirName ~ "/config", configBackupFile); } } } } // Is there a backup of the config file if the config file exists? if ((exists(cfg.configDirName ~ "/config")) && (!exists(configBackupFile))) { // create backup copy of current config file std.file.copy(cfg.configDirName ~ "/config", configBackupFile); } // config file set options can be changed via CLI input, specifically these will impact sync and --resync will be needed: // --syncdir ARG // --skip-file ARG // --skip-dir ARG if (exists(cfg.configDirName ~ "/config")) { // config file exists // was the sync_dir updated by CLI? if (cfg.configFileSyncDir != "") { // sync_dir was set in config file if (cfg.configFileSyncDir != cfg.getValueString("sync_dir")) { // config file was set and CLI input changed this log.vdebug("sync_dir: CLI override of config file option, --resync needed"); syncDirDifferent = true; } } // was the skip_file updated by CLI? if (cfg.configFileSkipFile != "") { // skip_file was set in config file if (cfg.configFileSkipFile != cfg.getValueString("skip_file")) { // config file was set and CLI input changed this log.vdebug("skip_file: CLI override of config file option, --resync needed"); skipFileDifferent = true; } } // was the skip_dir updated by CLI? if (cfg.configFileSkipDir != "") { // skip_dir was set in config file if (cfg.configFileSkipDir != cfg.getValueString("skip_dir")) { // config file was set and CLI input changed this log.vdebug("skip_dir: CLI override of config file option, --resync needed"); skipDirDifferent = true; } } } // Has anything triggered a --resync requirement? if (configOptionsDifferent || syncListDifferent || syncDirDifferent || skipFileDifferent || skipDirDifferent) { // --resync needed, is the user just testing configuration changes? if (!cfg.getValueBool("display_config")){ // not testing configuration changes if (!cfg.getValueBool("resync")) { // --resync not issued, fail fast log.error("An application configuration change has been detected where a --resync is required"); return EXIT_FAILURE; } else { // --resync issued, update hashes of config files if they exist if (!cfg.getValueBool("dry_run")) { // not doing a dry run, update hash files if config & sync_list exist if (exists(cfg.configDirName ~ "/config")) { // update hash log.vdebug("updating config hash as --resync issued"); std.file.write(configHashFile, computeQuickXorHash(cfg.configDirName ~ "/config")); // create backup copy of current config file log.vdebug("making backup of config file as --resync issued"); std.file.copy(cfg.configDirName ~ "/config", configBackupFile); } if (exists(cfg.configDirName ~ "/sync_list")) { // update sync_list hash log.vdebug("updating sync_list hash as --resync issued"); std.file.write(syncListHashFile, computeQuickXorHash(cfg.configDirName ~ "/sync_list")); } } } } } // dry-run notification if (cfg.getValueBool("dry_run")) { log.log("DRY-RUN Configured. 
Output below shows what 'would' have occurred."); } // Are we able to reach the OneDrive Service bool online = false; // dry-run database setup if (cfg.getValueBool("dry_run")) { // Make a copy of the original items.sqlite3 for use as the dry run copy if it exists if (exists(cfg.databaseFilePath)) { // copy the file log.vdebug("Copying items.sqlite3 to items-dryrun.sqlite3 to use for dry run operations"); copy(cfg.databaseFilePath,cfg.databaseFilePathDryRun); } } // sync_dir environment handling to handle ~ expansion properly string syncDir; if ((environment.get("SHELL") == "") && (environment.get("USER") == "")){ log.vdebug("sync_dir: No SHELL or USER environment variable configuration detected"); // No shell or user set, so expandTilde() will fail - usually headless system running under init.d / systemd or potentially Docker // Does the 'currently configured' sync_dir include a ~ if (canFind(cfg.getValueString("sync_dir"), "~")) { // A ~ was found log.vdebug("sync_dir: A '~' was found in sync_dir, using the calculated 'homePath' to replace '~'"); syncDir = cfg.homePath ~ strip(cfg.getValueString("sync_dir"), "~"); } else { // No ~ found in sync_dir, use as is log.vdebug("sync_dir: Getting syncDir from config value sync_dir"); syncDir = cfg.getValueString("sync_dir"); } } else { // A shell and user is set, expand any ~ as this will be expanded correctly if present log.vdebug("sync_dir: Getting syncDir from config value sync_dir"); if (canFind(cfg.getValueString("sync_dir"), "~")) { log.vdebug("sync_dir: A '~' was found in configured sync_dir, automatically expanding as SHELL and USER environment variable is set"); syncDir = expandTilde(cfg.getValueString("sync_dir")); } else { syncDir = cfg.getValueString("sync_dir"); } } // vdebug syncDir as set and calculated log.vdebug("syncDir: ", syncDir); // Configure logging if enabled if (cfg.getValueBool("enable_logging")){ // Read in a user defined log directory or use the default string logDir = cfg.getValueString("log_dir"); log.vlog("Using logfile dir: ", logDir); log.init(logDir); } // Configure whether notifications are used log.setNotifications(cfg.getValueBool("monitor") && !cfg.getValueBool("disable_notifications")); // upgrades if (exists(cfg.configDirName ~ "/items.db")) { if (!cfg.getValueBool("dry_run")) { safeRemove(cfg.configDirName ~ "/items.db"); } log.logAndNotify("Database schema changed, resync needed"); cfg.setValueBool("resync", true); } if (cfg.getValueBool("resync") || cfg.getValueBool("logout")) { if (cfg.getValueBool("resync")) log.vdebug("--resync requested"); log.vlog("Deleting the saved status ..."); if (!cfg.getValueBool("dry_run")) { safeRemove(cfg.databaseFilePath); safeRemove(cfg.deltaLinkFilePath); safeRemove(cfg.uploadStateFilePath); } if (cfg.getValueBool("logout")) { log.vdebug("--logout requested"); if (!cfg.getValueBool("dry_run")) { safeRemove(cfg.refreshTokenFilePath); } } } // Display current application configuration, no application initialisation if (cfg.getValueBool("display_config")){ string userConfigFilePath = cfg.configDirName ~ "/config"; string userSyncList = cfg.configDirName ~ "/sync_list"; // Display application version std.stdio.write("onedrive version = ", import("version")); // Display all of the pertinent configuration options writeln("Config path = ", cfg.configDirName); // Does a config file exist or are we using application defaults if (exists(userConfigFilePath)){ writeln("Config file found in config path = true"); } else { writeln("Config file found in config path = false"); } // 
Config Options writeln("Config option 'check_nosync' = ", cfg.getValueBool("check_nosync")); writeln("Config option 'sync_dir' = ", syncDir); writeln("Config option 'skip_dir' = ", cfg.getValueString("skip_dir")); writeln("Config option 'skip_file' = ", cfg.getValueString("skip_file")); writeln("Config option 'skip_dotfiles' = ", cfg.getValueBool("skip_dotfiles")); writeln("Config option 'skip_symlinks' = ", cfg.getValueBool("skip_symlinks")); writeln("Config option 'monitor_interval' = ", cfg.getValueLong("monitor_interval")); writeln("Config option 'min_notify_changes' = ", cfg.getValueLong("min_notify_changes")); writeln("Config option 'log_dir' = ", cfg.getValueString("log_dir")); // Is config option drive_id configured? if (cfg.getValueString("drive_id") != ""){ writeln("Config option 'drive_id' = ", cfg.getValueString("drive_id")); } // Is sync_list configured? if (exists(userSyncList)){ writeln("Config option 'sync_root_files' = ", cfg.getValueBool("sync_root_files")); writeln("Selective sync configured = true"); writeln("sync_list contents:"); // Output the sync_list contents auto syncListFile = File(userSyncList); auto range = syncListFile.byLine(); foreach (line; range) { writeln(line); } } else { writeln("Config option 'sync_root_files' = ", cfg.getValueBool("sync_root_files")); writeln("Selective sync configured = false"); } // exit return EXIT_SUCCESS; } if (cfg.getValueBool("force_http_11")) { log.log("NOTE: The use of --force-http-1.1 is depreciated"); } log.vlog("Initializing the OneDrive API ..."); try { online = testNetwork(); } catch (CurlException e) { // No network connection to OneDrive Service log.error("Cannot connect to Microsoft OneDrive Service"); log.error("Reason: ", e.msg); if (!cfg.getValueBool("monitor")) { return EXIT_FAILURE; } } // Initialize OneDrive, check for authorization oneDrive = new OneDriveApi(cfg); oneDrive.printAccessToken = cfg.getValueBool("print_token"); if (!oneDrive.init()) { log.error("Could not initialize the OneDrive API"); // workaround for segfault in std.net.curl.Curl.shutdown() on exit oneDrive.http.shutdown(); return EXIT_UNAUTHORIZED; } // if --synchronize or --monitor not passed in, exit & display help auto performSyncOK = false; if (cfg.getValueBool("synchronize") || cfg.getValueBool("monitor")) { performSyncOK = true; } // create-directory, remove-directory, source-directory, destination-directory // are activities that dont perform a sync no error message for these items either if (((cfg.getValueString("create_directory") != "") || (cfg.getValueString("remove_directory") != "")) || ((cfg.getValueString("source_directory") != "") && (cfg.getValueString("destination_directory") != "")) || (cfg.getValueString("get_file_link") != "") || (cfg.getValueString("get_o365_drive_id") != "") || cfg.getValueBool("display_sync_status")) { performSyncOK = true; } if (!performSyncOK) { writeln("\n--synchronize or --monitor missing from your command options or use --help for further assistance\n"); writeln("No OneDrive sync will be performed without either of these two arguments being present\n"); oneDrive.http.shutdown(); return EXIT_FAILURE; } // if --synchronize && --monitor passed in, exit & display help as these conflict with each other if (cfg.getValueBool("synchronize") && cfg.getValueBool("monitor")) { writeln("\nERROR: --synchronize and --monitor cannot be used together\n"); writeln("Refer to --help to determine which command option you should use.\n"); oneDrive.http.shutdown(); return EXIT_FAILURE; } // Initialize the item database 
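For --dry-run the client works against a copy of the item database so that a trial sync never touches items.sqlite3: the copy is created before the database is opened, and it is removed once the run completes near the end of main(). A simplified, hypothetical sketch of that lifecycle (paths are examples only; the client uses its own safeRemove helper rather than std.file.remove):

```d
import std.file : copy, exists, remove;

void main()
{
    // hypothetical paths, mirroring databaseFilePath / databaseFilePathDryRun
    string databaseFilePath       = "/home/user/.config/onedrive/items.sqlite3";
    string databaseFilePathDryRun = "/home/user/.config/onedrive/items-dryrun.sqlite3";

    // before the sync: start the dry run from a copy of the real database
    if (exists(databaseFilePath))
        copy(databaseFilePath, databaseFilePathDryRun);

    // ... open databaseFilePathDryRun and perform the trial sync ...

    // after the sync: discard the dry-run copy
    if (exists(databaseFilePathDryRun))
        remove(databaseFilePathDryRun);
}
```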
log.vlog("Opening the item database ..."); if (!cfg.getValueBool("dry_run")) { // Load the items.sqlite3 file as the database log.vdebug("Using database file: ", asNormalizedPath(cfg.databaseFilePath)); itemDb = new ItemDatabase(cfg.databaseFilePath); } else { // Load the items-dryrun.sqlite3 file as the database log.vdebug("Using database file: ", asNormalizedPath(cfg.databaseFilePathDryRun)); itemDb = new ItemDatabase(cfg.databaseFilePathDryRun); } log.vlog("All operations will be performed in: ", syncDir); if (!exists(syncDir)) { log.vdebug("syncDir: Configured syncDir is missing. Creating: ", syncDir); try { // Attempt to create the sync dir we have been configured with mkdirRecurse(syncDir); } catch (std.file.FileException e) { // Creating the sync directory failed log.error("ERROR: Unable to create local OneDrive syncDir - ", e.msg); oneDrive.http.shutdown(); return EXIT_FAILURE; } } chdir(syncDir); // Configure selective sync by parsing and getting a regex for skip_file config component auto selectiveSync = new SelectiveSync(); if (exists(cfg.syncListFilePath)){ log.vdebug("Loading user configured sync_list file ..."); syncListConfigured = true; // list what will be synced auto syncListFile = File(cfg.syncListFilePath); auto range = syncListFile.byLine(); foreach (line; range) { log.vdebug("sync_list: ", line); } } selectiveSync.load(cfg.syncListFilePath); // Configure skip_dir & skip_file from config entries log.vdebug("Configuring skip_dir ..."); log.vdebug("skip_dir: ", cfg.getValueString("skip_dir")); selectiveSync.setDirMask(cfg.getValueString("skip_dir")); log.vdebug("Configuring skip_file ..."); // Validate skip_file to ensure that this does not contain an invalid configuration // Do not use a skip_file entry of .* as this will prevent correct searching of local changes to process. foreach(entry; cfg.getValueString("skip_file").split("|")){ if (entry == ".*") { // invalid entry element detected log.logAndNotify("ERROR: Invalid skip_file entry '.*' detected"); return EXIT_FAILURE; } } // valid entry log.vdebug("skip_file: ", cfg.getValueString("skip_file")); selectiveSync.setFileMask(cfg.getValueString("skip_file")); // Initialize the sync engine auto sync = new SyncEngine(cfg, oneDrive, itemDb, selectiveSync); try { if (!initSyncEngine(sync)) { oneDrive.http.shutdown(); return EXIT_FAILURE; } else { if (cfg.getValueString("get_file_link") == "") { // Print out that we are initializing the engine only if we are not grabbing the file link log.logAndNotify("Initializing the Synchronization Engine ..."); } } } catch (CurlException e) { if (!cfg.getValueBool("monitor")) { log.log("\nNo Internet connection."); oneDrive.http.shutdown(); return EXIT_FAILURE; } } // We should only set noRemoteDelete in an upload-only scenario if ((cfg.getValueBool("upload_only"))&&(cfg.getValueBool("no_remote_delete"))) sync.setNoRemoteDelete(); // Do we configure to disable the upload validation routine if (cfg.getValueBool("disable_upload_validation")) sync.setDisableUploadValidation(); // Do we need to validate the syncDir to check for the presence of a '.nosync' file if (cfg.getValueBool("check_nomount")) { // we were asked to check the mounts if (exists(syncDir ~ "/.nosync")) { log.logAndNotify("ERROR: .nosync file found. Aborting synchronization process to safeguard data."); oneDrive.http.shutdown(); return EXIT_FAILURE; } } // Do we need to create or remove a directory? 
if ((cfg.getValueString("create_directory") != "") || (cfg.getValueString("remove_directory") != "")) { if (cfg.getValueString("create_directory") != "") { // create a directory on OneDrive sync.createDirectoryNoSync(cfg.getValueString("create_directory")); } if (cfg.getValueString("remove_directory") != "") { // remove a directory on OneDrive sync.deleteDirectoryNoSync(cfg.getValueString("remove_directory")); } } // Are we renaming or moving a directory? if ((cfg.getValueString("source_directory") != "") && (cfg.getValueString("destination_directory") != "")) { // We are renaming or moving a directory sync.renameDirectoryNoSync(cfg.getValueString("source_directory"), cfg.getValueString("destination_directory")); } // Are we obtaining the Office 365 Drive ID for a given Office 365 SharePoint Shared Library? if (cfg.getValueString("get_o365_drive_id") != "") { sync.querySiteCollectionForDriveID(cfg.getValueString("get_o365_drive_id")); } // Are we obtaining the URL path for a synced file? if (cfg.getValueString("get_file_link") != "") { sync.queryOneDriveForFileURL(cfg.getValueString("get_file_link"), syncDir); } // Are we displaying the sync status of the client? if (cfg.getValueBool("display_sync_status")) { string remotePath = "/"; string localPath = "."; // Are we doing a single directory check? if (cfg.getValueString("single_directory") != ""){ // Need two different path strings here remotePath = cfg.getValueString("single_directory"); localPath = cfg.getValueString("single_directory"); } sync.queryDriveForChanges(remotePath); } // Are we performing a sync, resync or monitor operation? if ((cfg.getValueBool("synchronize")) || (cfg.getValueBool("resync")) || (cfg.getValueBool("monitor"))) { if ((cfg.getValueBool("synchronize")) || (cfg.getValueBool("resync"))) { if (online) { // Check user entry for local path - the above chdir means we are already in ~/OneDrive/ thus singleDirectory is local to this path if (cfg.getValueString("single_directory") != ""){ // Does the directory we want to sync actually exist? if (!exists(cfg.getValueString("single_directory"))){ // the requested directory does not exist .. log.logAndNotify("ERROR: The requested local directory does not exist. 
Please check ~/OneDrive/ for requested path"); oneDrive.http.shutdown(); return EXIT_FAILURE; } } // perform a --synchronize sync performSync(sync, cfg.getValueString("single_directory"), cfg.getValueBool("download_only"), cfg.getValueBool("local_first"), cfg.getValueBool("upload_only"), LOG_NORMAL, true, syncListConfigured); } } if (cfg.getValueBool("monitor")) { log.logAndNotify("Initializing monitor ..."); log.log("OneDrive monitor interval (seconds): ", cfg.getValueLong("monitor_interval")); Monitor m = new Monitor(selectiveSync); m.onDirCreated = delegate(string path) { log.vlog("[M] Directory created: ", path); try { sync.scanForDifferences(path); } catch (CurlException e) { log.vlog("Offline, cannot create remote dir!"); } catch(Exception e) { log.logAndNotify("Cannot create remote directory: ", e.msg); } }; m.onFileChanged = delegate(string path) { log.vlog("[M] File changed: ", path); try { sync.scanForDifferences(path); } catch (CurlException e) { log.vlog("Offline, cannot upload changed item!"); } catch(Exception e) { log.logAndNotify("Cannot upload file changes/creation: ", e.msg); } }; m.onDelete = delegate(string path) { log.vlog("[M] Item deleted: ", path); try { sync.deleteByPath(path); } catch (CurlException e) { log.vlog("Offline, cannot delete item!"); } catch(SyncException e) { if (e.msg == "The item to delete is not in the local database") { log.vlog("Item cannot be deleted because not found in database"); } else { log.logAndNotify("Cannot delete remote item: ", e.msg); } } catch(Exception e) { log.logAndNotify("Cannot delete remote item: ", e.msg); } }; m.onMove = delegate(string from, string to) { log.vlog("[M] Item moved: ", from, " -> ", to); try { sync.uploadMoveItem(from, to); } catch (CurlException e) { log.vlog("Offline, cannot move item!"); } catch(Exception e) { log.logAndNotify("Cannot move item:, ", e.msg); } }; signal(SIGINT, &exitHandler); signal(SIGTERM, &exitHandler); // attempt to initialise monitor class if (!cfg.getValueBool("download_only")) { try { m.init(cfg, cfg.getValueLong("verbose") > 0, cfg.getValueBool("skip_symlinks"), cfg.getValueBool("check_nosync")); } catch (MonitorException e) { // monitor initialisation failed log.error("ERROR: ", e.msg); exit(-1); } } // monitor loop immutable auto checkInterval = dur!"seconds"(cfg.getValueLong("monitor_interval")); immutable auto logInterval = cfg.getValueLong("monitor_log_frequency"); immutable auto fullScanFrequency = cfg.getValueLong("monitor_fullscan_frequency"); auto lastCheckTime = MonoTime.currTime(); auto logMonitorCounter = 0; auto fullScanCounter = 0; bool fullScanRequired = true; bool syncListConfiguredOverride = false; // if sync list is configured, set to true if (syncListConfigured) { syncListConfiguredOverride = true; } while (true) { if (!cfg.getValueBool("download_only")) m.update(online); auto currTime = MonoTime.currTime(); if (currTime - lastCheckTime > checkInterval) { // log monitor output suppression logMonitorCounter += 1; if (logMonitorCounter > logInterval) { logMonitorCounter = 1; } // full scan of sync_dir fullScanCounter += 1; if (fullScanCounter > fullScanFrequency){ fullScanCounter = 1; fullScanRequired = true; if (syncListConfigured) { syncListConfiguredOverride = true; } } // sync option handling per sync loop log.vdebug("syncListConfigured = ", syncListConfigured); log.vdebug("fullScanRequired = ", fullScanRequired); log.vdebug("syncListConfiguredOverride = ", syncListConfiguredOverride); // log.logAndNotify("DEBUG trying to create checkpoint"); // auto res = 
itemdb.db_checkpoint(); // log.logAndNotify("Checkpoint return: ", res); // itemdb.dump_open_statements(); try { if (!initSyncEngine(sync)) { oneDrive.http.shutdown(); return EXIT_FAILURE; } try { // perform a --monitor sync log.vlog("Starting a sync with OneDrive"); performSync(sync, cfg.getValueString("single_directory"), cfg.getValueBool("download_only"), cfg.getValueBool("local_first"), cfg.getValueBool("upload_only"), (logMonitorCounter == logInterval ? MONITOR_LOG_QUIET : MONITOR_LOG_SILENT), fullScanRequired, syncListConfiguredOverride); if (!cfg.getValueBool("download_only")) { // discard all events that may have been generated by the sync m.update(false); } } catch (CurlException e) { // we already tried three times in the performSync routine // if we still have problems, then the sync handle might have // gone stale and we need to re-initialize the sync engine log.log("Persistent connection errors, reinitializing connection"); sync.reset(); } } catch (CurlException e) { log.log("Cannot initialize connection to OneDrive"); } // performSync complete, set lastCheckTime to current time log.vlog("Sync with OneDrive is complete"); fullScanRequired = false; if (syncListConfigured) { syncListConfiguredOverride = false; } lastCheckTime = MonoTime.currTime(); GC.collect(); } Thread.sleep(dur!"msecs"(500)); } } } // Workaround for segfault in std.net.curl.Curl.shutdown() on exit oneDrive.http.shutdown(); // Make sure the .wal file is incorporated into the main db before we exit destroy(itemDb); // --dry-run temp database cleanup if (cfg.getValueBool("dry_run")) { if (exists(cfg.databaseFilePathDryRun)) { // remove the file log.vdebug("Removing items-dryrun.sqlite3 as dry run operations complete"); safeRemove(cfg.databaseFilePathDryRun); } } return EXIT_SUCCESS; } bool initSyncEngine(SyncEngine sync) { try { sync.init(); } catch (OneDriveException e) { if (e.httpStatusCode == 400 || e.httpStatusCode == 401) { // Authorization is invalid log.log("\nAuthorization token invalid, use --logout to authorize the client again\n"); return false; } if (e.httpStatusCode >= 500) { // There was a HTTP 5xx Server Side Error, message already printed return false; } } return true; } // try to synchronize the folder three times void performSync(SyncEngine sync, string singleDirectory, bool downloadOnly, bool localFirst, bool uploadOnly, long logLevel, bool fullScanRequired, bool syncListConfigured) { int count; string remotePath = "/"; string localPath = "."; // Are we doing a single directory sync? 
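In the monitor loop above, a sync is attempted once monitor_interval seconds have elapsed since the last check, and every monitor_fullscan_frequency-th sync is promoted to a full scan of sync_dir. A simplified sketch of that bookkeeping, using the default values (45 seconds, every 10th run) and showing a single, artificially triggered loop iteration:

```d
import core.time;
import std.stdio;

void main()
{
    immutable checkInterval     = dur!"seconds"(45); // monitor_interval default
    immutable fullScanFrequency = 10;                // monitor_fullscan_frequency default

    // pretend the last check happened 60 seconds ago so the branch fires
    auto lastCheckTime   = MonoTime.currTime() - dur!"seconds"(60);
    long fullScanCounter = 0;
    bool fullScanRequired = false;

    // one iteration of the loop body, for illustration
    auto currTime = MonoTime.currTime();
    if (currTime - lastCheckTime > checkInterval) {
        fullScanCounter += 1;
        if (fullScanCounter > fullScanFrequency) {
            fullScanCounter  = 1;
            fullScanRequired = true; // roughly every 7.5 minutes with the defaults
        }
        writeln("sync now, fullScanRequired = ", fullScanRequired);
        lastCheckTime    = currTime;
        fullScanRequired = false;
    }
}
```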
if (singleDirectory != ""){ // Need two different path strings here remotePath = singleDirectory; localPath = singleDirectory; // Set flag for singleDirectoryScope for change handling sync.setSingleDirectoryScope(); } // Due to Microsoft Sharepoint 'enrichment' of files, we try to download the Microsoft modified file automatically // Set flag if we are in upload only state to handle this differently // See: https://github.com/OneDrive/onedrive-api-docs/issues/935 for further details if (uploadOnly) sync.setUploadOnly(); do { try { if (singleDirectory != ""){ // we were requested to sync a single directory log.vlog("Syncing changes from this selected path: ", singleDirectory); if (uploadOnly){ // Upload Only of selected single directory if (logLevel < MONITOR_LOG_QUIET) log.log("Syncing changes from selected local path only - NOT syncing data changes from OneDrive ..."); sync.scanForDifferences(localPath); } else { // No upload only if (localFirst) { // Local First if (logLevel < MONITOR_LOG_QUIET) log.log("Syncing changes from selected local path first before downloading changes from OneDrive ..."); sync.scanForDifferences(localPath); sync.applyDifferencesSingleDirectory(remotePath); } else { // OneDrive First if (logLevel < MONITOR_LOG_QUIET) log.log("Syncing changes from selected OneDrive path ..."); sync.applyDifferencesSingleDirectory(remotePath); // is this a download only request? if (!downloadOnly) { // process local changes sync.scanForDifferences(localPath); // ensure that the current remote state is updated locally sync.applyDifferencesSingleDirectory(remotePath); } } } } else { // no single directory sync if (uploadOnly){ // Upload Only of entire sync_dir if (logLevel < MONITOR_LOG_QUIET) log.log("Syncing changes from local path only - NOT syncing data changes from OneDrive ..."); sync.scanForDifferences(localPath); } else { // No upload only if (localFirst) { // sync local files first before downloading from OneDrive if (logLevel < MONITOR_LOG_QUIET) log.log("Syncing changes from local path first before downloading changes from OneDrive ..."); sync.scanForDifferences(localPath); sync.applyDifferences(syncListConfigured); } else { // sync from OneDrive first before uploading files to OneDrive if (logLevel < MONITOR_LOG_SILENT) log.log("Syncing changes from OneDrive ..."); // For the initial sync, always use the delta link so that we capture all the right delta changes including adds, moves & deletes sync.applyDifferences(false); // Is a full scan of the entire sync_dir required? if (fullScanRequired) { // is this a download only request? 
if (!downloadOnly) { // process local changes walking the entire path checking for changes // in monitor mode all local changes are captured via inotify // thus scanning every 'monitor_interval' (default 45 seconds) for local changes is excessive and not required sync.scanForDifferences(localPath); // ensure that the current remote state is updated locally to ensure everything is consistent // for the 'true-up' sync, if sync_list is configured, syncListConfigured = true, thus a FULL walk of all OneDrive objects will be requested and used if required sync.applyDifferences(syncListConfigured); } } } } } count = -1; } catch (Exception e) { if (++count == 3) { log.log("Giving up on sync after three attempts: ", e.msg); throw e; } else log.log("Retry sync count: ", count, ": ", e.msg); } } while (count != -1); } // getting around the @nogc problem // https://p0nce.github.io/d-idioms/#Bypassing-@nogc auto assumeNoGC(T) (T t) if (isFunctionPointer!T || isDelegate!T) { enum attrs = functionAttributes!T | FunctionAttribute.nogc; return cast(SetFunctionAttributes!(T, functionLinkage!T, attrs)) t; } extern(C) nothrow @nogc @system void exitHandler(int value) { try { assumeNoGC ( () { log.log("Got termination signal, shutting down db connection"); // make sure the .wal file is incorporated into the main db destroy(itemDb); // workaround for segfault in std.net.curl.Curl.shutdown() on exit oneDrive.http.shutdown(); })(); } catch(Exception e) {} exit(0); } onedrive-2.3.13/src/monitor.d000066400000000000000000000170321360252424000160370ustar00rootroot00000000000000import core.sys.linux.sys.inotify; import core.stdc.errno; import core.sys.posix.poll, core.sys.posix.unistd; import std.exception, std.file, std.path, std.regex, std.stdio, std.string; import config; import selective; import util; static import log; // relevant inotify events private immutable uint32_t mask = IN_CLOSE_WRITE | IN_CREATE | IN_DELETE | IN_MOVE | IN_IGNORED | IN_Q_OVERFLOW; class MonitorException: ErrnoException { @safe this(string msg, string file = __FILE__, size_t line = __LINE__) { super(msg, file, line); } } final class Monitor { bool verbose; // inotify file descriptor private int fd; // map every inotify watch descriptor to its directory private string[int] wdToDirName; // map the inotify cookies of move_from events to their path private string[int] cookieToPath; // buffer to receive the inotify events private void[] buffer; // skip symbolic links bool skip_symlinks; // check for .nosync if enabled bool check_nosync; private SelectiveSync selectiveSync; void delegate(string path) onDirCreated; void delegate(string path) onFileChanged; void delegate(string path) onDelete; void delegate(string from, string to) onMove; this(SelectiveSync selectiveSync) { assert(selectiveSync); this.selectiveSync = selectiveSync; } void init(Config cfg, bool verbose, bool skip_symlinks, bool check_nosync) { this.verbose = verbose; this.skip_symlinks = skip_symlinks; this.check_nosync = check_nosync; assert(onDirCreated && onFileChanged && onDelete && onMove); fd = inotify_init(); if (fd < 0) throw new MonitorException("inotify_init failed"); if (!buffer) buffer = new void[4096]; // from which point do we start watching for changes? 
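performSync() above wraps each synchronisation attempt in a retry loop: on an exception the attempt counter is incremented and the sync is retried, and after the third failure the last exception is rethrown. A standalone sketch of that do/while pattern (the withRetries helper and its action are hypothetical, not part of the client):

```d
import std.stdio;

// hypothetical helper mirroring the retry pattern in performSync()
void withRetries(void delegate() action)
{
    int count;
    do {
        try {
            action();
            count = -1;                    // success: leave the loop
        } catch (Exception e) {
            if (++count == 3) {
                writeln("Giving up after three attempts: ", e.msg);
                throw e;                   // surface the last failure
            }
            writeln("Retry attempt ", count, ": ", e.msg);
        }
    } while (count != -1);
}

void main()
{
    int calls;
    withRetries({
        calls += 1;
        if (calls < 2)
            throw new Exception("transient failure");
        writeln("succeeded on attempt ", calls);
    });
}
```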
string monitorPath; if (cfg.getValueString("single_directory") != ""){ // single directory in use, monitor only this monitorPath = "./" ~ cfg.getValueString("single_directory"); } else { // default monitorPath = "."; } addRecursive(monitorPath); } void shutdown() { if (fd > 0) close(fd); wdToDirName = null; } private void addRecursive(string dirname) { // skip non existing/disappeared items if (!exists(dirname)) { log.vlog("Not adding non-existing/disappeared directory: ", dirname); return; } // skip filtered items if (dirname != ".") { if (selectiveSync.isDirNameExcluded(strip(dirname,"./"))) { return; } if (selectiveSync.isFileNameExcluded(baseName(dirname))) { return; } if (selectiveSync.isPathExcludedViaSyncList(buildNormalizedPath(dirname))) { return; } } // skip symlinks if configured if (isSymlink(dirname)) { // if config says so we skip all symlinked items if (skip_symlinks) { // dont add a watch for this directory return; } } // Do we need to check for .nosync? Only if check_nosync is true if (check_nosync) { if (exists(buildNormalizedPath(dirname) ~ "/.nosync")) { log.vlog("Skipping watching path - .nosync found & --check-for-nosync enabled: ", buildNormalizedPath(dirname)); return; } } add(dirname); try { auto pathList = dirEntries(dirname, SpanMode.shallow, false); foreach(DirEntry entry; pathList) { if (entry.isDir) { addRecursive(entry.name); } } } catch (std.file.FileException e) { log.vdebug("ERROR: ", e.msg); return; } } private void add(string pathname) { int wd = inotify_add_watch(fd, toStringz(pathname), mask); if (wd < 0) { if (errno() == ENOSPC) { log.log("The user limit on the total number of inotify watches has been reached."); log.log("To see the current max number of watches run:"); log.log("sysctl fs.inotify.max_user_watches"); log.log("To change the current max number of watches to 524288 run:"); log.log("sudo sysctl fs.inotify.max_user_watches=524288"); } if (errno() == 13) { log.vlog("WARNING: inotify_add_watch failed - permission denied: ", pathname); return; } // Flag any other errors log.error("ERROR: inotify_add_watch failed: ", pathname); return; } wdToDirName[wd] = buildNormalizedPath(pathname) ~ "/"; log.vlog("Monitor directory: ", pathname); } // remove a watch descriptor private void remove(int wd) { assert(wd in wdToDirName); int ret = inotify_rm_watch(fd, wd); if (ret < 0) throw new MonitorException("inotify_rm_watch failed"); log.vlog("Monitored directory removed: ", wdToDirName[wd]); wdToDirName.remove(wd); } // remove the watch descriptors associated to the given path private void remove(const(char)[] path) { path ~= "/"; foreach (wd, dirname; wdToDirName) { if (dirname.startsWith(path)) { int ret = inotify_rm_watch(fd, wd); if (ret < 0) throw new MonitorException("inotify_rm_watch failed"); wdToDirName.remove(wd); log.vlog("Monitored directory removed: ", dirname); } } } // return the file path from an inotify event private string getPath(const(inotify_event)* event) { string path = wdToDirName[event.wd]; if (event.len > 0) path ~= fromStringz(event.name.ptr); return path; } void update(bool useCallbacks = true) { pollfd fds = { fd: fd, events: POLLIN }; while (true) { int ret = poll(&fds, 1, 0); if (ret == -1) throw new MonitorException("poll failed"); else if (ret == 0) break; // no events available size_t length = read(fd, buffer.ptr, buffer.length); if (length == -1) throw new MonitorException("read failed"); int i = 0; while (i < length) { inotify_event *event = cast(inotify_event*) &buffer[i]; string path; if (event.mask & IN_IGNORED) { // 
forget the directory associated to the watch descriptor wdToDirName.remove(event.wd); goto skip; } else if (event.mask & IN_Q_OVERFLOW) { throw new MonitorException("Inotify overflow, events missing"); } // skip filtered items path = getPath(event); if (selectiveSync.isDirNameExcluded(strip(path,"./"))) { goto skip; } if (selectiveSync.isFileNameExcluded(strip(path,"./"))) { goto skip; } if (selectiveSync.isPathExcludedViaSyncList(path)) { goto skip; } if (event.mask & IN_MOVED_FROM) { log.vdebug("event IN_MOVED_FROM: ", path); cookieToPath[event.cookie] = path; } else if (event.mask & IN_MOVED_TO) { log.vdebug("event IN_MOVED_TO: ", path); if (event.mask & IN_ISDIR) addRecursive(path); auto from = event.cookie in cookieToPath; if (from) { cookieToPath.remove(event.cookie); if (useCallbacks) onMove(*from, path); } else { // item moved from the outside if (event.mask & IN_ISDIR) { if (useCallbacks) onDirCreated(path); } else { if (useCallbacks) onFileChanged(path); } } } else if (event.mask & IN_CREATE) { log.vdebug("event IN_CREATE: ", path); if (event.mask & IN_ISDIR) { addRecursive(path); if (useCallbacks) onDirCreated(path); } } else if (event.mask & IN_DELETE) { log.vdebug("event IN_DELETE: ", path); if (useCallbacks) onDelete(path); } else if ((event.mask & IN_CLOSE_WRITE) && !(event.mask & IN_ISDIR)) { log.vdebug("event IN_CLOSE_WRITE and ...: ", path); if (useCallbacks) onFileChanged(path); } else { log.vdebug("event unhandled: ", path); assert(0); } skip: i += inotify_event.sizeof + event.len; } // assume that the items moved outside the watched directory have been deleted foreach (cookie, path; cookieToPath) { log.vdebug("deleting (post loop): ", path); if (useCallbacks) onDelete(path); remove(path); cookieToPath.remove(cookie); } } } } onedrive-2.3.13/src/notifications/000077500000000000000000000000001360252424000170515ustar00rootroot00000000000000onedrive-2.3.13/src/notifications/README000066400000000000000000000005771360252424000177420ustar00rootroot00000000000000The files in this directory have been obtained form the following places: dnotify.d https://github.com/Dav1dde/dnotify/blob/master/dnotify.d License: Creative Commons Zro 1.0 Universal see https://github.com/Dav1dde/dnotify/blob/master/LICENSE notify.d https://github.com/D-Programming-Deimos/libnotify/blob/master/deimos/notify/notify.d License: GPL 2.1 or upwards, see file onedrive-2.3.13/src/notifications/dnotify.d000066400000000000000000000212431360252424000206740ustar00rootroot00000000000000module dnotify; private { import std.string : toStringz; import std.conv : to; import std.traits : isPointer, isArray; import std.variant : Variant; import std.array : appender; import deimos.notify.notify; } public import deimos.notify.notify : NOTIFY_EXPIRES_DEFAULT, NOTIFY_EXPIRES_NEVER, NotifyUrgency; version(NoPragma) { } else { pragma(lib, "notify"); pragma(lib, "gmodule"); pragma(lib, "glib-2.0"); } extern (C) { private void g_free(void* mem); private void g_list_free(GList* glist); } version(NoGdk) { } else { version(NoPragma) { } else { pragma(lib, "gdk_pixbuf"); } private: extern (C) { GdkPixbuf* gdk_pixbuf_new_from_file(const(char)* filename, GError **error); } } class NotificationError : Exception { string message; GError* gerror; this(GError* gerror) { this.message = to!(string)(gerror.message); this.gerror = gerror; super(this.message); } this(string message) { this.message = message; super(message); } } bool check_availability() { // notify_init might return without dbus server actually started // try to check for 
running dbus server char **ret_name; char **ret_vendor; char **ret_version; char **ret_spec_version; bool ret; try { return notify_get_server_info(ret_name, ret_vendor, ret_version, ret_spec_version); } catch (NotificationError e) { throw new NotificationError("Cannot find dbus server!"); } } void init(in char[] name) { notify_init(name.toStringz()); } alias notify_is_initted is_initted; alias notify_uninit uninit; static this() { init(__FILE__); } static ~this() { uninit(); } string get_app_name() { return to!(string)(notify_get_app_name()); } void set_app_name(in char[] app_name) { notify_set_app_name(app_name.toStringz()); } string[] get_server_caps() { auto result = appender!(string[])(); GList* list = notify_get_server_caps(); if(list !is null) { for(GList* c = list; c !is null; c = c.next) { result.put(to!(string)(cast(char*)c.data)); g_free(c.data); } g_list_free(list); } return result.data; } struct ServerInfo { string name; string vendor; string version_; string spec_version; } ServerInfo get_server_info() { char* name; char* vendor; char* version_; char* spec_version; notify_get_server_info(&name, &vendor, &version_, &spec_version); scope(exit) { g_free(name); g_free(vendor); g_free(version_); g_free(spec_version); } return ServerInfo(to!string(name), to!string(vendor), to!string(version_), to!string(spec_version)); } struct Action { const(char[]) id; const(char[]) label; NotifyActionCallback callback; void* user_ptr; } class Notification { NotifyNotification* notify_notification; const(char)[] summary; const(char)[] body_; const(char)[] icon; bool closed = true; private int _timeout = NOTIFY_EXPIRES_DEFAULT; const(char)[] _category; NotifyUrgency _urgency; GdkPixbuf* _image; Variant[const(char)[]] _hints; const(char)[] _app_name; Action[] _actions; this(in char[] summary, in char[] body_, in char[] icon="") in { assert(is_initted(), "call dnotify.init() before using Notification"); } body { this.summary = summary; this.body_ = body_; this.icon = icon; notify_notification = notify_notification_new(summary.toStringz(), body_.toStringz(), icon.toStringz()); } bool update(in char[] summary, in char[] body_, in char[] icon="") { this.summary = summary; this.body_ = body_; this.icon = icon; return notify_notification_update(notify_notification, summary.toStringz(), body_.toStringz(), icon.toStringz()); } void show() { GError* ge; if(!notify_notification_show(notify_notification, &ge)) { throw new NotificationError(ge); } } @property int timeout() { return _timeout; } @property void timeout(int timeout) { this._timeout = timeout; notify_notification_set_timeout(notify_notification, timeout); } @property const(char[]) category() { return _category; } @property void category(in char[] category) { this._category = category; notify_notification_set_category(notify_notification, category.toStringz()); } @property NotifyUrgency urgency() { return _urgency; } @property void urgency(NotifyUrgency urgency) { this._urgency = urgency; notify_notification_set_urgency(notify_notification, urgency); } void set_image(GdkPixbuf* pixbuf) { notify_notification_set_image_from_pixbuf(notify_notification, pixbuf); //_image = pixbuf; } version(NoGdk) { } else { void set_image(in char[] filename) { GError* ge; // TODO: free pixbuf GdkPixbuf* pixbuf = gdk_pixbuf_new_from_file(filename.toStringz(), &ge); if(pixbuf is null) { if(ge is null) { throw new NotificationError("Unable to load file: " ~ filename.idup); } else { throw new NotificationError(ge); } } assert(notify_notification !is null); 
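// Hand the freshly loaded pixbuf over to libnotify. As the TODO above notes,
// the GdkPixbuf reference returned by gdk_pixbuf_new_from_file() is not
// explicitly released again afterwards.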
notify_notification_set_image_from_pixbuf(notify_notification, pixbuf); // TODO: fix segfault //_image = pixbuf; } } @property GdkPixbuf* image() { return _image; } // using deprecated set_hint_* functions (GVariant is an opaque structure, which needs the glib) void set_hint(T)(in char[] key, T value) { static if(is(T == int)) { notify_notification_set_hint_int32(notify_notification, key, value); } else static if(is(T == uint)) { notify_notification_set_hint_uint32(notify_notification, key, value); } else static if(is(T == double)) { notify_notification_set_hint_double(notify_notification, key, value); } else static if(is(T : const(char)[])) { notify_notification_set_hint_string(notify_notification, key, value.toStringz()); } else static if(is(T == ubyte)) { notify_notification_set_hint_byte(notify_notification, key, value); } else static if(is(T == ubyte[])) { notify_notification_set_hint_byte_array(notify_notification, key, value.ptr, value.length); } else { static assert(false, "unsupported value for Notification.set_hint"); } _hints[key] = Variant(value); } // unset hint? Variant get_hint(in char[] key) { return _hints[key]; } @property const(char)[] app_name() { return _app_name; } @property void app_name(in char[] name) { this._app_name = app_name; notify_notification_set_app_name(notify_notification, app_name.toStringz()); } void add_action(T)(in char[] action, in char[] label, NotifyActionCallback callback, T user_data) { static if(isPointer!T) { void* user_ptr = cast(void*)user_data; } else static if(isArray!T) { void* user_ptr = cast(void*)user_data.ptr; } else { void* user_ptr = cast(void*)&user_data; } notify_notification_add_action(notify_notification, action.toStringz(), label.toStringz(), callback, user_ptr, null); _actions ~= Action(action, label, callback, user_ptr); } void add_action()(Action action) { notify_notification_add_action(notify_notification, action.id.toStringz(), action.label.toStringz(), action.callback, action.user_ptr, null); _actions ~= action; } @property Action[] actions() { return _actions; } void clear_actions() { notify_notification_clear_actions(notify_notification); } void close() { GError* ge; if(!notify_notification_close(notify_notification, &ge)) { throw new NotificationError(ge); } } @property int closed_reason() { return notify_notification_get_closed_reason(notify_notification); } } version(TestMain) { import std.stdio; void main() { writeln(get_app_name()); set_app_name("bla"); writeln(get_app_name()); writeln(get_server_caps()); writeln(get_server_info()); auto n = new Notification("foo", "bar", "notification-message-im"); n.timeout = 3; n.show(); } } onedrive-2.3.13/src/notifications/notify.d000066400000000000000000000151001360252424000205230ustar00rootroot00000000000000/** * Copyright (C) 2004-2006 Christian Hammond * Copyright (C) 2010 Red Hat, Inc. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
* * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the * Free Software Foundation, Inc., 59 Temple Place - Suite 330, * Boston, MA 02111-1307, USA. */ module deimos.notify.notify; enum NOTIFY_VERSION_MAJOR = 0; enum NOTIFY_VERSION_MINOR = 7; enum NOTIFY_VERSION_MICRO = 5; template NOTIFY_CHECK_VERSION(int major, int minor, int micro) { enum NOTIFY_CHECK_VERSION = ((NOTIFY_VERSION_MAJOR > major) || (NOTIFY_VERSION_MAJOR == major && NOTIFY_VERSION_MINOR > minor) || (NOTIFY_VERSION_MAJOR == major && NOTIFY_VERSION_MINOR == minor && NOTIFY_VERSION_MICRO >= micro)); } alias ulong GType; alias void function(void*) GFreeFunc; struct GError { uint domain; int code; char* message; } struct GList { void* data; GList* next; GList* prev; } // dummies struct GdkPixbuf {} struct GObject {} struct GObjectClass {} struct GVariant {} GType notify_urgency_get_type(); /** * NOTIFY_EXPIRES_DEFAULT: * * The default expiration time on a notification. */ enum NOTIFY_EXPIRES_DEFAULT = -1; /** * NOTIFY_EXPIRES_NEVER: * * The notification never expires. It stays open until closed by the calling API * or the user. */ enum NOTIFY_EXPIRES_NEVER = 0; // #define NOTIFY_TYPE_NOTIFICATION (notify_notification_get_type ()) // #define NOTIFY_NOTIFICATION(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), NOTIFY_TYPE_NOTIFICATION, NotifyNotification)) // #define NOTIFY_NOTIFICATION_CLASS(k) (G_TYPE_CHECK_CLASS_CAST((k), NOTIFY_TYPE_NOTIFICATION, NotifyNotificationClass)) // #define NOTIFY_IS_NOTIFICATION(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), NOTIFY_TYPE_NOTIFICATION)) // #define NOTIFY_IS_NOTIFICATION_CLASS(k) (G_TYPE_CHECK_CLASS_TYPE ((k), NOTIFY_TYPE_NOTIFICATION)) // #define NOTIFY_NOTIFICATION_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS ((o), NOTIFY_TYPE_NOTIFICATION, NotifyNotificationClass)) extern (C) { struct NotifyNotificationPrivate; struct NotifyNotification { /*< private >*/ GObject parent_object; NotifyNotificationPrivate *priv; } struct NotifyNotificationClass { GObjectClass parent_class; /* Signals */ void function(NotifyNotification *notification) closed; } /** * NotifyUrgency: * @NOTIFY_URGENCY_LOW: Low urgency. Used for unimportant notifications. * @NOTIFY_URGENCY_NORMAL: Normal urgency. Used for most standard notifications. * @NOTIFY_URGENCY_CRITICAL: Critical urgency. Used for very important notifications. * * The urgency level of the notification. */ enum NotifyUrgency { NOTIFY_URGENCY_LOW, NOTIFY_URGENCY_NORMAL, NOTIFY_URGENCY_CRITICAL, } /** * NotifyActionCallback: * @notification: * @action: * @user_data: * * An action callback function. 
*/ alias void function(NotifyNotification* notification, char* action, void* user_data) NotifyActionCallback; GType notify_notification_get_type(); NotifyNotification* notify_notification_new(const(char)* summary, const(char)* body_, const(char)* icon); bool notify_notification_update(NotifyNotification* notification, const(char)* summary, const(char)* body_, const(char)* icon); bool notify_notification_show(NotifyNotification* notification, GError** error); void notify_notification_set_timeout(NotifyNotification* notification, int timeout); void notify_notification_set_category(NotifyNotification* notification, const(char)* category); void notify_notification_set_urgency(NotifyNotification* notification, NotifyUrgency urgency); void notify_notification_set_image_from_pixbuf(NotifyNotification* notification, GdkPixbuf* pixbuf); void notify_notification_set_icon_from_pixbuf(NotifyNotification* notification, GdkPixbuf* icon); void notify_notification_set_hint_int32(NotifyNotification* notification, const(char)* key, int value); void notify_notification_set_hint_uint32(NotifyNotification* notification, const(char)* key, uint value); void notify_notification_set_hint_double(NotifyNotification* notification, const(char)* key, double value); void notify_notification_set_hint_string(NotifyNotification* notification, const(char)* key, const(char)* value); void notify_notification_set_hint_byte(NotifyNotification* notification, const(char)* key, ubyte value); void notify_notification_set_hint_byte_array(NotifyNotification* notification, const(char)* key, const(ubyte)* value, ulong len); void notify_notification_set_hint(NotifyNotification* notification, const(char)* key, GVariant* value); void notify_notification_set_app_name(NotifyNotification* notification, const(char)* app_name); void notify_notification_clear_hints(NotifyNotification* notification); void notify_notification_add_action(NotifyNotification* notification, const(char)* action, const(char)* label, NotifyActionCallback callback, void* user_data, GFreeFunc free_func); void notify_notification_clear_actions(NotifyNotification* notification); bool notify_notification_close(NotifyNotification* notification, GError** error); int notify_notification_get_closed_reason(const NotifyNotification* notification); bool notify_init(const(char)* app_name); void notify_uninit(); bool notify_is_initted(); const(char)* notify_get_app_name(); void notify_set_app_name(const(char)* app_name); GList *notify_get_server_caps(); bool notify_get_server_info(char** ret_name, char** ret_vendor, char** ret_version, char** ret_spec_version); } version(MainTest) { import std.string; void main() { notify_init("test".toStringz()); auto n = notify_notification_new("summary".toStringz(), "body".toStringz(), "none".toStringz()); GError* ge; notify_notification_show(n, &ge); scope(success) notify_uninit(); } } onedrive-2.3.13/src/onedrive.d000066400000000000000000000775521360252424000162000ustar00rootroot00000000000000import std.net.curl; import etc.c.curl: CurlOption; import std.datetime, std.exception, std.file, std.json, std.path; import std.stdio, std.string, std.uni, std.uri, std.file; import std.array: split; import core.stdc.stdlib; import core.thread, std.conv, std.math; import std.algorithm.searching; import progress; import config; static import log; shared bool debugResponse = false; private bool dryRun = false; private bool simulateNoRefreshTokenFile = false; private immutable { // Client Identifier string clientId = "22c49a0d-d21c-4792-aed1-8f163c982546"; // 
Personal & Business Queries string authUrl = "https://login.microsoftonline.com/common/oauth2/v2.0/authorize"; string redirectUrl = "https://login.microsoftonline.com/common/oauth2/nativeclient"; string tokenUrl = "https://login.microsoftonline.com/common/oauth2/v2.0/token"; string driveByIdUrl = "https://graph.microsoft.com/v1.0/drives/"; // Office 365 / SharePoint Queries string siteSearchUrl = "https://graph.microsoft.com/v1.0/sites?search"; string siteDriveUrl = "https://graph.microsoft.com/v1.0/sites/"; } private { string driveUrl = "https://graph.microsoft.com/v1.0/me/drive"; string itemByIdUrl = "https://graph.microsoft.com/v1.0/me/drive/items/"; string itemByPathUrl = "https://graph.microsoft.com/v1.0/me/drive/root:/"; string driveId = ""; } class OneDriveException: Exception { // https://docs.microsoft.com/en-us/onedrive/developer/rest-api/concepts/errors int httpStatusCode; JSONValue error; @safe pure this(int httpStatusCode, string reason, string file = __FILE__, size_t line = __LINE__) { this.httpStatusCode = httpStatusCode; this.error = error; string msg = format("HTTP request returned status code %d (%s)", httpStatusCode, reason); super(msg, file, line); } this(int httpStatusCode, string reason, ref const JSONValue error, string file = __FILE__, size_t line = __LINE__) { this.httpStatusCode = httpStatusCode; this.error = error; string msg = format("HTTP request returned status code %d (%s)\n%s", httpStatusCode, reason, toJSON(error, true)); super(msg, file, line); } } final class OneDriveApi { private Config cfg; private string refreshToken, accessToken; private SysTime accessTokenExpiration; /* private */ HTTP http; // if true, every new access token is printed bool printAccessToken; this(Config cfg) { this.cfg = cfg; http = HTTP(); // DNS lookup timeout http.dnsTimeout = (dur!"seconds"(5)); // timeout for connecting http.connectTimeout = (dur!"seconds"(10)); // Timeouts // with the following settings we force // - if there is no data flow for 5min, abort // - if the download time for one item exceeds 1h, abort // // timeout for activity on connection // this translates into Curl's CURLOPT_LOW_SPEED_TIME // which says // It contains the time in number seconds that the // transfer speed should be below the CURLOPT_LOW_SPEED_LIMIT // for the library to consider it too slow and abort. http.dataTimeout = (dur!"seconds"(300)); // maximum time an operation is allowed to take // This includes dns resolution, connecting, data transfer, etc. http.operationTimeout = (dur!"seconds"(3600)); // Specify how many redirects should be allowed http.maxRedirects(5); // Do we enable curl debugging? if (cfg.getValueBool("debug_https")) { http.verbose = true; .debugResponse = true; } // Custom User Agent if (cfg.getValueString("user_agent") != "") { http.setUserAgent = cfg.getValueString("user_agent"); } else { http.setUserAgent = "OneDrive Client for Linux " ~ strip(import("version")); } // What version of HTTP protocol do we use? // Curl >= 7.62.0 defaults to http2 for a significant number of operations if (cfg.getValueBool("force_http_2")) { // Use curl defaults log.vdebug("Upgrading all HTTP operations to HTTP/2 where applicable"); } else { // Downgrade curl by default due to silent exist issues when using http/2 // See issue #501 for details and discussion log.vdebug("Downgrading all HTTP operations to HTTP/1.1 by default"); // Downgrade to HTTP 1.1 - yes version = 2 is HTTP 1.1 http.handle.set(CurlOption.http_version,2); } // Do we set the dryRun handlers? 
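// When --dry-run is in effect no refresh token is written back to disk, and
// combining it with --logout additionally simulates a missing refresh token
// file so the authorization flow can be exercised without touching the real token.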
if (cfg.getValueBool("dry_run")) { .dryRun = true; if (cfg.getValueBool("logout")) { .simulateNoRefreshTokenFile = true; } } } bool init() { try { driveId = cfg.getValueString("drive_id"); if (driveId.length) { driveUrl = driveByIdUrl ~ driveId; itemByIdUrl = driveUrl ~ "/items"; itemByPathUrl = driveUrl ~ "/root:/"; } } catch (Exception e) {} if (!.dryRun) { // original code try { refreshToken = readText(cfg.refreshTokenFilePath); } catch (FileException e) { try { return authorize(); } catch (CurlException e) { log.error("Cannot authorize with Microsoft OneDrive Service"); return false; } } return true; } else { // --dry-run if (!.simulateNoRefreshTokenFile) { try { refreshToken = readText(cfg.refreshTokenFilePath); } catch (FileException e) { return authorize(); } return true; } else { // --dry-run & --logout return authorize(); } } } bool authorize() { import std.stdio, std.regex; char[] response; string url = authUrl ~ "?client_id=" ~ clientId ~ "&scope=Files.ReadWrite%20Files.ReadWrite.all%20Sites.ReadWrite.All%20offline_access&response_type=code&redirect_uri=" ~ redirectUrl; string authFilesString = cfg.getValueString("auth_files"); if (authFilesString == "") { log.log("Authorize this app visiting:\n"); write(url, "\n\n", "Enter the response uri: "); readln(response); } else { string[] authFiles = authFilesString.split(":"); string authUrl = authFiles[0]; string responseUrl = authFiles[1]; auto authUrlFile = File(authUrl, "w"); authUrlFile.write(url); authUrlFile.close(); while (!exists(responseUrl)) { Thread.sleep(dur!("msecs")(100)); } response = cast(char[]) read(responseUrl); try { std.file.remove(authUrl); std.file.remove(responseUrl); } catch (FileException e) { log.error("Cannot remove files ", authUrl, " ", responseUrl); } } // match the authorization code auto c = matchFirst(response, r"(?:[\?&]code=)([\w\d-]+)"); if (c.empty) { log.log("Invalid uri"); return false; } c.popFront(); // skip the whole match redeemToken(c.front); return true; } // https://docs.microsoft.com/en-us/onedrive/developer/rest-api/api/drive_get JSONValue getDefaultDrive() { checkAccessTokenExpired(); return get(driveUrl); } // https://docs.microsoft.com/en-us/onedrive/developer/rest-api/api/driveitem_get JSONValue getDefaultRoot() { checkAccessTokenExpired(); return get(driveUrl ~ "/root"); } // https://docs.microsoft.com/en-us/onedrive/developer/rest-api/api/driveitem_delta JSONValue viewChangesById(const(char)[] driveId, const(char)[] id, const(char)[] deltaLink) { checkAccessTokenExpired(); const(char)[] url = deltaLink; if (url == null) { url = driveByIdUrl ~ driveId ~ "/items/" ~ id ~ "/delta"; url ~= "?select=id,name,eTag,cTag,deleted,file,folder,root,fileSystemInfo,remoteItem,parentReference,size"; } return get(url); } // https://docs.microsoft.com/en-us/onedrive/developer/rest-api/api/driveitem_get_content void downloadById(const(char)[] driveId, const(char)[] id, string saveToPath, long fileSize) { checkAccessTokenExpired(); scope(failure) { if (exists(saveToPath)) remove(saveToPath); } mkdirRecurse(dirName(saveToPath)); const(char)[] url = driveByIdUrl ~ driveId ~ "/items/" ~ id ~ "/content?AVOverride=1"; download(url, saveToPath, fileSize); } // https://docs.microsoft.com/en-us/onedrive/developer/rest-api/api/driveitem_put_content JSONValue simpleUpload(string localPath, string parentDriveId, string parentId, string filename, const(char)[] eTag = null) { checkAccessTokenExpired(); string url = driveByIdUrl ~ parentDriveId ~ "/items/" ~ parentId ~ ":/" ~ encodeComponent(filename) ~ 
":/content"; // TODO: investigate why this fails for remote folders //if (eTag) http.addRequestHeader("If-Match", eTag); /*else http.addRequestHeader("If-None-Match", "*");*/ return upload(localPath, url); } // https://docs.microsoft.com/en-us/onedrive/developer/rest-api/api/driveitem_put_content JSONValue simpleUploadReplace(string localPath, string driveId, string id, const(char)[] eTag = null) { checkAccessTokenExpired(); string url = driveByIdUrl ~ driveId ~ "/items/" ~ id ~ "/content"; if (eTag) http.addRequestHeader("If-Match", eTag); return upload(localPath, url); } // https://docs.microsoft.com/en-us/onedrive/developer/rest-api/api/driveitem_update JSONValue updateById(const(char)[] driveId, const(char)[] id, JSONValue data, const(char)[] eTag = null) { checkAccessTokenExpired(); const(char)[] url = driveByIdUrl ~ driveId ~ "/items/" ~ id; if (eTag) http.addRequestHeader("If-Match", eTag); http.addRequestHeader("Content-Type", "application/json"); return patch(url, data.toString()); } // https://docs.microsoft.com/en-us/onedrive/developer/rest-api/api/driveitem_delete void deleteById(const(char)[] driveId, const(char)[] id, const(char)[] eTag = null) { checkAccessTokenExpired(); const(char)[] url = driveByIdUrl ~ driveId ~ "/items/" ~ id; //TODO: investigate why this always fail with 412 (Precondition Failed) //if (eTag) http.addRequestHeader("If-Match", eTag); del(url); } // https://docs.microsoft.com/en-us/onedrive/developer/rest-api/api/driveitem_post_children JSONValue createById(const(char)[] parentDriveId, const(char)[] parentId, JSONValue item) { checkAccessTokenExpired(); const(char)[] url = driveByIdUrl ~ parentDriveId ~ "/items/" ~ parentId ~ "/children"; http.addRequestHeader("Content-Type", "application/json"); return post(url, item.toString()); } // Return the details of the specified path JSONValue getPathDetails(const(string) path) { checkAccessTokenExpired(); const(char)[] url; // string itemByPathUrl = "https://graph.microsoft.com/v1.0/me/drive/root:/"; if ((path == ".")||(path == "/")) url = driveUrl ~ "/root/"; else url = itemByPathUrl ~ encodeComponent(path) ~ ":/"; url ~= "?select=id,name,eTag,cTag,deleted,file,folder,root,fileSystemInfo,remoteItem,parentReference,size"; return get(url); } // Return the details of the specified id // https://docs.microsoft.com/en-us/onedrive/developer/rest-api/api/driveitem_get JSONValue getPathDetailsById(const(char)[] driveId, const(char)[] id) { checkAccessTokenExpired(); const(char)[] url; // string driveByIdUrl = "https://graph.microsoft.com/v1.0/drives/"; url = driveByIdUrl ~ driveId ~ "/items/" ~ id; url ~= "?select=id,name,eTag,cTag,deleted,file,folder,root,fileSystemInfo,remoteItem,parentReference,size"; return get(url); } // Return the requested details of the specified id // https://docs.microsoft.com/en-us/onedrive/developer/rest-api/api/driveitem_get JSONValue getFileDetails(const(char)[] driveId, const(char)[] id) { checkAccessTokenExpired(); const(char)[] url; // string driveByIdUrl = "https://graph.microsoft.com/v1.0/drives/"; url = driveByIdUrl ~ driveId ~ "/items/" ~ id; url ~= "?select=size,malware,file,webUrl"; return get(url); } // https://dev.onedrive.com/items/move.htm JSONValue moveByPath(const(char)[] sourcePath, JSONValue moveData) { // Need to use itemByPathUrl checkAccessTokenExpired(); string url = itemByPathUrl ~ encodeComponent(sourcePath); http.addRequestHeader("Content-Type", "application/json"); return move(url, moveData.toString()); } // 
https://docs.microsoft.com/en-us/onedrive/developer/rest-api/api/driveitem_createuploadsession JSONValue createUploadSession(const(char)[] parentDriveId, const(char)[] parentId, const(char)[] filename, const(char)[] eTag = null, JSONValue item = null) { checkAccessTokenExpired(); const(char)[] url = driveByIdUrl ~ parentDriveId ~ "/items/" ~ parentId ~ ":/" ~ encodeComponent(filename) ~ ":/createUploadSession"; if (eTag) http.addRequestHeader("If-Match", eTag); http.addRequestHeader("Content-Type", "application/json"); return post(url, item.toString()); } // https://dev.onedrive.com/items/upload_large_files.htm JSONValue uploadFragment(const(char)[] uploadUrl, string filepath, long offset, long offsetSize, long fileSize) { checkAccessTokenExpired(); scope(exit) { http.clearRequestHeaders(); http.onSend = null; } http.method = HTTP.Method.put; http.url = uploadUrl; import std.conv; string contentRange = "bytes " ~ to!string(offset) ~ "-" ~ to!string(offset + offsetSize - 1) ~ "/" ~ to!string(fileSize); http.addRequestHeader("Content-Range", contentRange); auto file = File(filepath, "rb"); file.seek(offset); http.onSend = data => file.rawRead(data).length; http.contentLength = offsetSize; auto response = perform(); // TODO: retry on 5xx errors checkHttpCode(response); return response; } // https://dev.onedrive.com/items/upload_large_files.htm JSONValue requestUploadStatus(const(char)[] uploadUrl) { checkAccessTokenExpired(); // when using microsoft graph the auth code is different return get(uploadUrl, true); } // https://docs.microsoft.com/en-us/onedrive/developer/rest-api/api/site_search?view=odsp-graph-online JSONValue o365SiteSearch(string o365SharedLibraryName){ checkAccessTokenExpired(); const(char)[] url; url = siteSearchUrl ~ "=" ~ o365SharedLibraryName; return get(url); } // https://docs.microsoft.com/en-us/onedrive/developer/rest-api/api/drive_list?view=odsp-graph-online JSONValue o365SiteDrives(string site_id){ checkAccessTokenExpired(); const(char)[] url; url = siteDriveUrl ~ site_id ~ "/drives"; return get(url); } private void redeemToken(const(char)[] authCode) { const(char)[] postData = "client_id=" ~ clientId ~ "&redirect_uri=" ~ redirectUrl ~ "&code=" ~ authCode ~ "&grant_type=authorization_code"; acquireToken(postData); } private void newToken() { string postData = "client_id=" ~ clientId ~ "&redirect_uri=" ~ redirectUrl ~ "&refresh_token=" ~ refreshToken ~ "&grant_type=refresh_token"; acquireToken(postData); } private void acquireToken(const(char)[] postData) { JSONValue response = post(tokenUrl, postData); if (response.type() == JSONType.object) { if ("access_token" in response){ accessToken = "bearer " ~ response["access_token"].str(); refreshToken = response["refresh_token"].str(); accessTokenExpiration = Clock.currTime() + dur!"seconds"(response["expires_in"].integer()); if (!.dryRun) { std.file.write(cfg.refreshTokenFilePath, refreshToken); } if (printAccessToken) writeln("New access token: ", accessToken); } else { log.error("\nInvalid authentication response from OneDrive. 
Please check the response uri\n"); // re-authorize authorize(); } } else { log.vdebug("Invalid JSON response from OneDrive unable to initialize application"); } } private void checkAccessTokenExpired() { try { if (Clock.currTime() >= accessTokenExpiration) { newToken(); } } catch (OneDriveException e) { if (e.httpStatusCode == 400 || e.httpStatusCode == 401) { e.msg ~= "\nRefresh token invalid, use --logout to authorize the client again"; } } } private void addAccessTokenHeader() { http.addRequestHeader("Authorization", accessToken); } private JSONValue get(const(char)[] url, bool skipToken = false) { scope(exit) http.clearRequestHeaders(); http.method = HTTP.Method.get; http.url = url; if (!skipToken) addAccessTokenHeader(); // HACK: requestUploadStatus auto response = perform(); checkHttpCode(response); // OneDrive API Response Debugging if (.debugResponse){ writeln("OneDrive API Response: ", response); } return response; } private void del(const(char)[] url) { scope(exit) http.clearRequestHeaders(); http.method = HTTP.Method.del; http.url = url; addAccessTokenHeader(); auto response = perform(); checkHttpCode(response); } private void download(const(char)[] url, string filename, long fileSize) { // Threshold for displaying download bar long thresholdFileSize = 4 * 2^^20; // 4 MiB scope(exit) http.clearRequestHeaders(); http.method = HTTP.Method.get; http.url = url; addAccessTokenHeader(); auto f = File(filename, "wb"); http.onReceive = (ubyte[] data) { f.rawWrite(data); return data.length; }; if (fileSize >= thresholdFileSize){ // Download Progress Bar size_t iteration = 20; Progress p = new Progress(iteration); p.title = "Downloading"; writeln(); real previousDLPercent = -1.0; real percentCheck = 5.0; // Setup progress bar to display http.onProgress = delegate int(size_t dltotal, size_t dlnow, size_t ultotal, size_t ulnow) { // For each onProgress, what is the % of dlnow to dltotal real currentDLPercent = round(double(dlnow)/dltotal*100); // If matching 5% of download, increment progress bar if ((isIdentical(fmod(currentDLPercent, percentCheck), 0.0)) && (previousDLPercent != currentDLPercent)) { p.next(); previousDLPercent = currentDLPercent; } return 0; }; // Perform download & display progress bar http.perform(); writeln(); // Reset onProgress to not display anything for next download http.onProgress = delegate int(size_t dltotal, size_t dlnow, size_t ultotal, size_t ulnow) { return 0; }; } else { // No progress bar http.perform(); } // Check the HTTP response code checkHttpCode(); } private auto patch(T)(const(char)[] url, const(T)[] patchData) { scope(exit) http.clearRequestHeaders(); http.method = HTTP.Method.patch; http.url = url; addAccessTokenHeader(); auto response = perform(patchData); checkHttpCode(response); return response; } private auto post(T)(const(char)[] url, const(T)[] postData) { scope(exit) http.clearRequestHeaders(); http.method = HTTP.Method.post; http.url = url; addAccessTokenHeader(); auto response = perform(postData); checkHttpCode(response); return response; } private auto move(T)(const(char)[] url, const(T)[] postData) { scope(exit) http.clearRequestHeaders(); http.method = HTTP.Method.patch; http.url = url; addAccessTokenHeader(); auto response = perform(postData); checkHttpCode(); return response; } private JSONValue upload(string filepath, string url) { scope(exit) { http.clearRequestHeaders(); http.onSend = null; http.contentLength = 0; } http.method = HTTP.Method.put; http.url = url; addAccessTokenHeader(); http.addRequestHeader("Content-Type", 
"application/octet-stream"); auto file = File(filepath, "rb"); http.onSend = data => file.rawRead(data).length; http.contentLength = file.size; auto response = perform(); checkHttpCode(response); return response; } private JSONValue perform(const(void)[] sendData) { scope(exit) { http.onSend = null; http.contentLength = 0; } if (sendData) { http.contentLength = sendData.length; http.onSend = (void[] buf) { import std.algorithm: min; size_t minLen = min(buf.length, sendData.length); if (minLen == 0) return 0; buf[0 .. minLen] = sendData[0 .. minLen]; sendData = sendData[minLen .. $]; return minLen; }; } else { http.onSend = buf => 0; } return perform(); } private JSONValue perform() { scope(exit) http.onReceive = null; char[] content; http.onReceive = (ubyte[] data) { content ~= data; // HTTP Server Response Code Debugging if (.debugResponse){ writeln("OneDrive HTTP Server Response: ", http.statusLine.code); } return data.length; }; JSONValue json; try { http.perform(); } catch (CurlException e) { // Parse and display error message received from OneDrive log.error("ERROR: OneDrive returned an error with the following message:"); auto errorArray = splitLines(e.msg); string errorMessage = errorArray[0]; if (canFind(errorMessage, "Couldn't connect to server on handle") || canFind(errorMessage, "Couldn't resolve host name on handle")) { // This is a curl timeout log.error(" Error Message: There was a timeout in accessing the Microsoft OneDrive service - Internet connectivity issue?"); // or 408 request timeout // https://github.com/abraunegg/onedrive/issues/694 // Back off & retry with incremental delay int retryCount = 10000; int retryAttempts = 1; int backoffInterval = 1; int maxBackoffInterval = 3600; bool retrySuccess = false; while (!retrySuccess){ backoffInterval++; log.vdebug(" Retry Attempt: ", retryAttempts); int thisBackOffInterval = retryAttempts*backoffInterval; if (thisBackOffInterval <= maxBackoffInterval) { Thread.sleep(dur!"seconds"(thisBackOffInterval)); } else { Thread.sleep(dur!"seconds"(maxBackoffInterval)); } try { http.perform(); // no error from http.perform() on re-try log.log("Internet connectivity to Microsoft OneDrive service has been restored"); retrySuccess = true; } catch (CurlException e) { if (canFind(e.msg, "Couldn't connect to server on handle") || canFind(e.msg, "Couldn't resolve host name on handle")) { log.error(" Error Message: There was a timeout in accessing the Microsoft OneDrive service - Internet connectivity issue?"); // Increment & loop around retryAttempts++; } if (retryAttempts == retryCount) { // we have attempted to re-connect X number of times // false set this to true to break out of while loop retrySuccess = true; } } } if (retryAttempts >= retryCount) { log.error(" Error Message: Was unable to reconnect to the Microsoft OneDrive service after 10000 attempts lasting over 1.2 years!"); throw new OneDriveException(408, "Request Timeout - HTTP 408 or Internet down?"); } } else { // Some other error was returned log.error(" Error Message: ", errorMessage); } // return an empty JSON for handling return json; } try { json = content.parseJSON(); } catch (JSONException e) { // Log that a JSON Exception was caught, dont output the HTML response from OneDrive log.vdebug("JSON Exception caught when performing HTTP operations - use --debug-https to diagnose further"); } return json; } private void checkHttpCode() { // https://dev.onedrive.com/misc/errors.htm // https://developer.overdrive.com/docs/reference-guide /* HTTP/1.1 Response handling Errors in 
the OneDrive API are returned using standard HTTP status codes, as well as a JSON error response object. The following HTTP status codes should be expected. Status code Status message Description 200 OK Request was handled OK 201 Created This means you've made a successful POST to checkout, lock in a format, or place a hold 204 No Content This means you've made a successful DELETE to remove a hold or return a title 400 Bad Request Cannot process the request because it is malformed or incorrect. 401 Unauthorized Required authentication information is either missing or not valid for the resource. 403 Forbidden Access is denied to the requested resource. The user might not have enough permission. 404 Not Found The requested resource doesn’t exist. 405 Method Not Allowed The HTTP method in the request is not allowed on the resource. 406 Not Acceptable This service doesn’t support the format requested in the Accept header. 408 Request Time out Not expected from OneDrive, but can be used to handle Internet connection failures the same (fallback and try again) 409 Conflict The current state conflicts with what the request expects. For example, the specified parent folder might not exist. 410 Gone The requested resource is no longer available at the server. 411 Length Required A Content-Length header is required on the request. 412 Precondition Failed A precondition provided in the request (such as an if-match header) does not match the resource's current state. 413 Request Entity Too Large The request size exceeds the maximum limit. 415 Unsupported Media Type The content type of the request is a format that is not supported by the service. 416 Requested Range Not Satisfiable The specified byte range is invalid or unavailable. 422 Unprocessable Entity Cannot process the request because it is semantically incorrect. 429 Too Many Requests Client application has been throttled and should not attempt to repeat the request until an amount of time has elapsed. 500 Internal Server Error There was an internal server error while processing the request. 501 Not Implemented The requested feature isn’t implemented. 502 Bad Gateway The service was unreachable 503 Service Unavailable The service is temporarily unavailable. You may repeat the request after a delay. There may be a Retry-After header. 507 Insufficient Storage The maximum storage quota has been reached. 509 Bandwidth Limit Exceeded Your app has been throttled for exceeding the maximum bandwidth cap. Your app can retry the request again after more time has elapsed. HTTP/2 Response handling 0 OK */ switch(http.statusLine.code) { // 0 - OK ... HTTP2 version of 200 OK case 0: break; // 200 - OK case 200: // No Log .. break; // 201 - Created OK // 202 - Accepted // 204 - Deleted OK case 201,202,204: // No actions, but log if verbose logging //log.vlog("OneDrive Response: '", http.statusLine.code, " - ", http.statusLine.reason, "'"); break; // 302 - resource found and available at another location, redirect case 302: break; // 400 - Bad Request case 400: // Bad Request .. how should we act? 
log.vlog("OneDrive returned a 'HTTP 400 - Bad Request' - gracefully handling error"); break; // 403 - Forbidden case 403: // OneDrive responded that the user is forbidden log.vlog("OneDrive returned a 'HTTP 403 - Forbidden' - gracefully handling error"); break; // 404 - Item not found case 404: // Item was not found - do not throw an exception log.vlog("OneDrive returned a 'HTTP 404 - Item not found' - gracefully handling error"); break; // 408 - Request Timeout case 408: // Request to connect to OneDrive service timed out log.vlog("Request Timeout - gracefully handling error"); throw new OneDriveException(408, "Request Timeout - HTTP 408 or Internet down?"); // 409 - Conflict case 409: // Conflict handling .. how should we act? This only really gets triggered if we are using --local-first & we remove items.db as the DB thinks the file is not uploaded but it is log.vlog("OneDrive returned a 'HTTP 409 - Conflict' - gracefully handling error"); break; // 412 - Precondition Failed case 412: // A precondition provided in the request (such as an if-match header) does not match the resource's current state. log.vlog("OneDrive returned a 'HTTP 412 - Precondition Failed' - gracefully handling error"); break; // 415 - Unsupported Media Type case 415: // Unsupported Media Type ... sometimes triggered on image files, especially PNG log.vlog("OneDrive returned a 'HTTP 415 - Unsupported Media Type' - gracefully handling error"); break; // 429 - Too Many Requests case 429: // Too many requests in a certain time window // https://docs.microsoft.com/en-us/sharepoint/dev/general-development/how-to-avoid-getting-throttled-or-blocked-in-sharepoint-online log.vlog("OneDrive returned a 'HTTP 429 - Too Many Requests' - gracefully handling error"); break; // Server side (OneDrive) Errors // 500 - Internal Server Error // 502 - Bad Gateway // 503 - Service Unavailable // 504 - Gateway Timeout (Issue #320) case 500: // No actions log.vlog("OneDrive returned a 'HTTP 500 Internal Server Error' - gracefully handling error"); break; case 502: // No actions log.vlog("OneDrive returned a 'HTTP 502 Bad Gateway Error' - gracefully handling error"); break; case 503: // No actions log.vlog("OneDrive returned a 'HTTP 503 Service Unavailable Error' - gracefully handling error"); break; case 504: // No actions log.vlog("OneDrive returned a 'HTTP 504 Gateway Timeout Error' - gracefully handling error"); break; // "else" default: throw new OneDriveException(http.statusLine.code, http.statusLine.reason); } } private void checkHttpCode(ref const JSONValue response) { switch(http.statusLine.code) { // 0 - OK ... HTTP2 version of 200 OK case 0: break; // 200 - OK case 200: // No Log .. break; // 201 - Created OK // 202 - Accepted // 204 - Deleted OK case 201,202,204: // No actions, but log if verbose logging //log.vlog("OneDrive Response: '", http.statusLine.code, " - ", http.statusLine.reason, "'"); break; // 302 - resource found and available at another location, redirect case 302: break; // 400 - Bad Request case 400: // Bad Request .. how should we act? 
log.vlog("OneDrive returned a 'HTTP 400 - Bad Request' - gracefully handling error"); // make sure this is thrown so that it is caught throw new OneDriveException(http.statusLine.code, http.statusLine.reason, response); // 403 - Forbidden case 403: // OneDrive responded that the user is forbidden log.vlog("OneDrive returned a 'HTTP 403 - Forbidden' - gracefully handling error"); // Throw this as a specific exception so this is caught when performing sync.o365SiteSearch throw new OneDriveException(http.statusLine.code, http.statusLine.reason, response); // 412 - Precondition Failed case 412: // Throw this as a specific exception so this is caught when performing sync.uploadLastModifiedTime throw new OneDriveException(http.statusLine.code, http.statusLine.reason, response); // Server side (OneDrive) Errors // 500 - Internal Server Error // 502 - Bad Gateway // 503 - Service Unavailable // 504 - Gateway Timeout (Issue #320) case 500: // Throw this as a specific exception so this is caught throw new OneDriveException(http.statusLine.code, http.statusLine.reason, response); case 502: // Throw this as a specific exception so this is caught throw new OneDriveException(http.statusLine.code, http.statusLine.reason, response); case 503: // Throw this as a specific exception so this is caught throw new OneDriveException(http.statusLine.code, http.statusLine.reason, response); case 504: // Throw this as a specific exception so this is caught throw new OneDriveException(http.statusLine.code, http.statusLine.reason, response); // Default - all other errors that are not a 2xx or a 302 default: if (http.statusLine.code / 100 != 2 && http.statusLine.code != 302) { throw new OneDriveException(http.statusLine.code, http.statusLine.reason, response); } } } } unittest { string configDirName = expandTilde("~/.config/onedrive"); auto cfg = new config.Config(configDirName); cfg.init(); OneDriveApi onedrive = new OneDriveApi(cfg); onedrive.init(); std.file.write("/tmp/test", "test"); // simpleUpload auto item = onedrive.simpleUpload("/tmp/test", "/test"); try { item = onedrive.simpleUpload("/tmp/test", "/test"); } catch (OneDriveException e) { assert(e.httpStatusCode == 409); } try { item = onedrive.simpleUpload("/tmp/test", "/test", "123"); } catch (OneDriveException e) { assert(e.httpStatusCode == 412); } item = onedrive.simpleUpload("/tmp/test", "/test", item["eTag"].str); // deleteById try { onedrive.deleteById(item["id"].str, "123"); } catch (OneDriveException e) { assert(e.httpStatusCode == 412); } onedrive.deleteById(item["id"].str, item["eTag"].str); onedrive.http.shutdown(); } onedrive-2.3.13/src/progress.d000066400000000000000000000065441360252424000162220ustar00rootroot00000000000000module progress; import std.stdio; import std.range; import std.format; import std.datetime; import core.sys.posix.unistd; import core.sys.posix.sys.ioctl; class Progress { private: immutable static size_t default_width = 80; size_t max_width = 40; size_t width = default_width; ulong start_time; string caption = "Progress"; size_t iterations; size_t counter; size_t getTerminalWidth() { size_t column = default_width; version (CRuntime_Musl) { } else { winsize ws; if(ioctl(STDOUT_FILENO, TIOCGWINSZ, &ws) != -1 && ws.ws_col > 0) { column = ws.ws_col; } } return column; } void clear() { write("\r"); for(auto i = 0; i < width; i++) write(" "); write("\r"); } int calc_eta() { immutable auto ratio = cast(double)counter / iterations; auto current_time = Clock.currTime.toUnixTime(); auto duration = cast(int)(current_time - 
start_time); int hours, minutes, seconds; double elapsed = (current_time - start_time); int eta_sec = cast(int)((elapsed / ratio) - elapsed); // Return an ETA or Duration? if (eta_sec != 0){ return eta_sec; } else { return duration; } } string progressbarText(string header_text, string footer_text) { immutable auto ratio = cast(double)counter / iterations; string result = ""; double bar_length = width - header_text.length - footer_text.length; if(bar_length > max_width && max_width > 0) { bar_length = max_width; } size_t i = 0; for(; i < ratio * bar_length; i++) result ~= "o"; for(; i < bar_length; i++) result ~= " "; return header_text ~ result ~ footer_text; } void print() { immutable auto ratio = cast(double)counter / iterations; auto header = appender!string(); auto footer = appender!string(); header.formattedWrite("%s %3d%% |", caption, cast(int)(ratio * 100)); if(counter <= 1 || ratio == 0.0) { footer.formattedWrite("| ETA --:--:--:"); } else { int h, m, s; dur!"seconds"(calc_eta()) .split!("hours", "minutes", "seconds")(h, m, s); if (counter != iterations){ footer.formattedWrite("| ETA %02d:%02d:%02d ", h, m, s); } else { footer.formattedWrite("| DONE IN %02d:%02d:%02d ", h, m, s); } } write(progressbarText(header.data, footer.data)); } void update() { width = getTerminalWidth(); clear(); print(); stdout.flush(); } public: this(size_t iterations) { if(iterations <= 0) iterations = 1; counter = 0; this.iterations = iterations; start_time = Clock.currTime.toUnixTime; } @property { string title() { return caption; } string title(string text) { return caption = text; } } @property { size_t count() { return counter; } size_t count(size_t val) { if(val > iterations) val = iterations; return counter = val; } } @property { size_t maxWidth() { return max_width; } size_t maxWidth(size_t w) { return max_width = w; } } void reset() { counter = 0; start_time = Clock.currTime.toUnixTime; } void next() { counter++; if(counter > iterations) counter = iterations; update(); } } onedrive-2.3.13/src/qxor.d000066400000000000000000000050771360252424000153470ustar00rootroot00000000000000import std.algorithm; import std.digest.digest; // implementation of the QuickXorHash algorithm in D // https://github.com/OneDrive/onedrive-api-docs/blob/live/docs/code-snippets/quickxorhash.md struct QuickXor { private immutable int widthInBits = 160; private immutable size_t lengthInBytes = (widthInBits - 1) / 8 + 1; private immutable size_t lengthInQWords = (widthInBits - 1) / 64 + 1; private immutable int bitsInLastCell = widthInBits % 64; // 32 private immutable int shift = 11; private ulong[lengthInQWords] _data; private ulong _lengthSoFar; private int _shiftSoFar; nothrow @safe void put(scope const(ubyte)[] array...) { int vectorArrayIndex = _shiftSoFar / 64; int vectorOffset = _shiftSoFar % 64; immutable size_t iterations = min(array.length, widthInBits); for (size_t i = 0; i < iterations; i++) { immutable bool isLastCell = vectorArrayIndex == _data.length - 1; immutable int bitsInVectorCell = isLastCell ? bitsInLastCell : 64; if (vectorOffset <= bitsInVectorCell - 8) { for (size_t j = i; j < array.length; j += widthInBits) { _data[vectorArrayIndex] ^= cast(ulong) array[j] << vectorOffset; } } else { int index1 = vectorArrayIndex; int index2 = isLastCell ? 
0 : (vectorArrayIndex + 1); ubyte low = cast(ubyte) (bitsInVectorCell - vectorOffset); ubyte xoredByte = 0; for (size_t j = i; j < array.length; j += widthInBits) { xoredByte ^= array[j]; } _data[index1] ^= cast(ulong) xoredByte << vectorOffset; _data[index2] ^= cast(ulong) xoredByte >> low; } vectorOffset += shift; if (vectorOffset >= bitsInVectorCell) { vectorArrayIndex = isLastCell ? 0 : vectorArrayIndex + 1; vectorOffset -= bitsInVectorCell; } } _shiftSoFar = cast(int) (_shiftSoFar + shift * (array.length % widthInBits)) % widthInBits; _lengthSoFar += array.length; } nothrow @safe void start() { _data = _data.init; _shiftSoFar = 0; _lengthSoFar = 0; } nothrow @trusted ubyte[lengthInBytes] finish() { ubyte[lengthInBytes] tmp; tmp[0 .. lengthInBytes] = (cast(ubyte*) _data)[0 .. lengthInBytes]; for (size_t i = 0; i < 8; i++) { tmp[lengthInBytes - 8 + i] ^= (cast(ubyte*) &_lengthSoFar)[i]; } return tmp; } } unittest { assert(isDigest!QuickXor); } unittest { QuickXor qxor; qxor.put(cast(ubyte[]) "The quick brown fox jumps over the lazy dog"); assert(qxor.finish().toHexString() == "6CC4A56F2B26C492FA4BBE57C1F31C4193A972BE"); } alias QuickXorDigest = WrapperDigest!(QuickXor); onedrive-2.3.13/src/selective.d000066400000000000000000000066651360252424000163450ustar00rootroot00000000000000import std.algorithm; import std.array; import std.file; import std.path; import std.regex; import std.stdio; import util; final class SelectiveSync { private string[] paths; private Regex!char mask; private Regex!char dirmask; void load(string filepath) { if (exists(filepath)) { paths = File(filepath) .byLine() .map!(a => buildNormalizedPath(a)) .filter!(a => a.length > 0) .array; } } void setFileMask(const(char)[] mask) { this.mask = wild2regex(mask); } void setDirMask(const(char)[] dirmask) { this.dirmask = wild2regex(dirmask); } // config file skip_dir parameter bool isDirNameExcluded(string name) { // Does the directory name match skip_dir config entry? // Returns true if the name matches a skip_dir config entry // Returns false if no match // Try full path match first if (!name.matchFirst(dirmask).empty) { return true; } else { // check just the file name string filename = baseName(name); if(!filename.matchFirst(dirmask).empty) { return true; } } // no match return false; } // config file skip_file parameter bool isFileNameExcluded(string name) { // Does the file name match skip_file config entry? 
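// For example, with a skip_file wildcard such as "*.tmp" a path like
// "Documents/notes.tmp" is excluded, either via the full path match below
// or via the baseName() fallback.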
// Returns true if the name matches a skip_file config entry // Returns false if no match // Try full path match first if (!name.matchFirst(mask).empty) { return true; } else { // check just the file name string filename = baseName(name); if(!filename.matchFirst(mask).empty) { return true; } } // no match return false; } // Match against sync_list only bool isPathExcludedViaSyncList(string path) { return .isPathExcluded(path, paths); } // Match against skip_dir, skip_file & sync_list entries bool isPathExcludedMatchAll(string path) { return .isPathExcluded(path, paths) || .isPathMatched(path, mask) || .isPathMatched(path, dirmask); } } // test if the given path is not included in the allowed paths // if there are no allowed paths always return false private bool isPathExcluded(string path, string[] allowedPaths) { // always allow the root if (path == ".") return false; // if there are no allowed paths always return false if (allowedPaths.empty) return false; path = buildNormalizedPath(path); foreach (allowed; allowedPaths) { auto comm = commonPrefix(path, allowed); if (comm.length == path.length) { // the given path is contained in an allowed path return false; } if (comm.length == allowed.length && path[comm.length] == '/') { // the given path is a subitem of an allowed path return false; } } return true; } // test if the given path is matched by the regex expression. // recursively test up the tree. private bool isPathMatched(string path, Regex!char mask) { path = buildNormalizedPath(path); auto paths = pathSplitter(path); string prefix = ""; foreach(base; paths) { prefix ~= base; if (!path.matchFirst(mask).empty) { // the given path matches something which we should skip return true; } prefix ~= dirSeparator; } return false; } unittest { assert(isPathExcluded("Documents2", ["Documents"])); assert(!isPathExcluded("Documents", ["Documents"])); assert(!isPathExcluded("Documents/a.txt", ["Documents"])); assert(isPathExcluded("Hello/World", ["Hello/John"])); assert(!isPathExcluded(".", ["Documents"])); } onedrive-2.3.13/src/sqlite.d000066400000000000000000000127521360252424000156550ustar00rootroot00000000000000module sqlite; import std.stdio; import etc.c.sqlite3; import std.string: fromStringz, toStringz; import core.stdc.stdlib; import std.conv; static import log; extern (C) immutable(char)* sqlite3_errstr(int); // missing from the std library static this() { if (sqlite3_libversion_number() < 3006019) { throw new SqliteException("sqlite 3.6.19 or newer is required"); } } private string ifromStringz(const(char)* cstr) { return fromStringz(cstr).dup; } class SqliteException: Exception { @safe pure nothrow this(string msg, string file = __FILE__, size_t line = __LINE__, Throwable next = null) { super(msg, file, line, next); } @safe pure nothrow this(string msg, Throwable next, string file = __FILE__, size_t line = __LINE__) { super(msg, file, line, next); } } struct Database { private sqlite3* pDb; this(const(char)[] filename) { open(filename); } ~this() { close(); } int db_checkpoint() { return sqlite3_wal_checkpoint(pDb, null); } void dump_open_statements() { log.log("Dumpint open statements: \n"); auto p = sqlite3_next_stmt(pDb, null); while (p != null) { log.log (" - " ~ ifromStringz(sqlite3_sql(p)) ~ "\n"); p = sqlite3_next_stmt(pDb, p); } } void open(const(char)[] filename) { // https://www.sqlite.org/c3ref/open.html int rc = sqlite3_open(toStringz(filename), &pDb); if (rc == SQLITE_CANTOPEN) { // Database cannot be opened log.error("\nThe database cannot be opened. 
Please check the permissions of ~/.config/onedrive/items.sqlite3\n"); close(); exit(-1); } if (rc != SQLITE_OK) { log.error("\nA database access error occurred: " ~ getErrorMessage() ~ "\n"); close(); exit(-1); } sqlite3_extended_result_codes(pDb, 1); // always use extended result codes } void exec(const(char)[] sql) { // https://www.sqlite.org/c3ref/exec.html int rc = sqlite3_exec(pDb, toStringz(sql), null, null, null); if (rc != SQLITE_OK) { log.error("\nA database execution error occurred: "~ getErrorMessage() ~ "\n"); close(); exit(-1); } } int getVersion() { int userVersion; extern (C) int callback(void* user_version, int count, char** column_text, char** column_name) { import core.stdc.stdlib: atoi; *(cast(int*) user_version) = atoi(*column_text); return 0; } int rc = sqlite3_exec(pDb, "PRAGMA user_version", &callback, &userVersion, null); if (rc != SQLITE_OK) { throw new SqliteException(ifromStringz(sqlite3_errmsg(pDb))); } return userVersion; } string getErrorMessage() { return ifromStringz(sqlite3_errmsg(pDb)); } void setVersion(int userVersion) { import std.conv: to; exec("PRAGMA user_version=" ~ to!string(userVersion)); } Statement prepare(const(char)[] zSql) { Statement s; // https://www.sqlite.org/c3ref/prepare.html int rc = sqlite3_prepare_v2(pDb, zSql.ptr, cast(int) zSql.length, &s.pStmt, null); if (rc != SQLITE_OK) { throw new SqliteException(ifromStringz(sqlite3_errmsg(pDb))); } return s; } void close() { // https://www.sqlite.org/c3ref/close.html sqlite3_close_v2(pDb); pDb = null; } } struct Statement { struct Result { private sqlite3_stmt* pStmt; private const(char)[][] row; private this(sqlite3_stmt* pStmt) { this.pStmt = pStmt; step(); // initialize the range } @property bool empty() { return row.length == 0; } @property auto front() { return row; } alias step popFront; void step() { // https://www.sqlite.org/c3ref/step.html int rc = sqlite3_step(pStmt); if (rc == SQLITE_BUSY) { // Database is locked by another onedrive process log.error("The database is currently locked by another process - cannot sync"); return; } if (rc == SQLITE_DONE) { row.length = 0; } else if (rc == SQLITE_ROW) { // https://www.sqlite.org/c3ref/data_count.html int count = sqlite3_data_count(pStmt); row = new const(char)[][count]; foreach (size_t i, ref column; row) { // https://www.sqlite.org/c3ref/column_blob.html column = fromStringz(sqlite3_column_text(pStmt, to!int(i))); } } else { string errorMessage = ifromStringz(sqlite3_errmsg(sqlite3_db_handle(pStmt))); log.error("\nA database statement execution error occurred: "~ errorMessage ~ "\n"); exit(-1); } } } private sqlite3_stmt* pStmt; ~this() { // https://www.sqlite.org/c3ref/finalize.html sqlite3_finalize(pStmt); } void bind(int index, const(char)[] value) { reset(); // https://www.sqlite.org/c3ref/bind_blob.html int rc = sqlite3_bind_text(pStmt, index, value.ptr, cast(int) value.length, SQLITE_STATIC); if (rc != SQLITE_OK) { throw new SqliteException(ifromStringz(sqlite3_errmsg(sqlite3_db_handle(pStmt)))); } } Result exec() { reset(); return Result(pStmt); } private void reset() { // https://www.sqlite.org/c3ref/reset.html int rc = sqlite3_reset(pStmt); if (rc != SQLITE_OK) { throw new SqliteException(ifromStringz(sqlite3_errmsg(sqlite3_db_handle(pStmt)))); } } } unittest { auto db = Database(":memory:"); db.exec("CREATE TABLE test( id TEXT PRIMARY KEY, value TEXT )"); assert(db.getVersion() == 0); db.setVersion(1); assert(db.getVersion() == 1); auto s = db.prepare("INSERT INTO test VALUES (?, ?)"); s.bind(1, "key1"); s.bind(2, "value"); 
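// execute the prepared INSERT with the two bound text values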
s.exec(); s.bind(1, "key2"); s.bind(2, null); s.exec(); s = db.prepare("SELECT * FROM test ORDER BY id ASC"); auto r = s.exec(); assert(r.front[0] == "key1"); r.popFront(); assert(r.front[1] == null); r.popFront(); assert(r.empty); } onedrive-2.3.13/src/sync.d000066400000000000000000004314331360252424000153310ustar00rootroot00000000000000import std.algorithm; import std.array: array; import std.datetime; import std.exception: enforce; import std.file, std.json, std.path; import std.regex; import std.stdio, std.string, std.uni, std.uri; import std.conv; import std.encoding; import core.time, core.thread; import core.stdc.stdlib; import config, itemdb, onedrive, selective, upload, util; static import log; // threshold after which files will be uploaded using an upload session private long thresholdFileSize = 4 * 2^^20; // 4 MiB // flag to set whether local files should be deleted private bool noRemoteDelete = false; // flag to set if we are running as uploadOnly private bool uploadOnly = false; // Do we configure to disable the upload validation routine private bool disableUploadValidation = false; private bool isItemFolder(const ref JSONValue item) { return ("folder" in item) != null; } private bool isItemFile(const ref JSONValue item) { return ("file" in item) != null; } private bool isItemDeleted(const ref JSONValue item) { return ("deleted" in item) != null; } private bool isItemRoot(const ref JSONValue item) { return ("root" in item) != null; } private bool isItemRemote(const ref JSONValue item) { return ("remoteItem" in item) != null; } private bool hasParentReference(const ref JSONValue item) { return ("parentReference" in item) != null; } private bool hasParentReferenceId(const ref JSONValue item) { return ("id" in item["parentReference"]) != null; } private bool hasParentReferencePath(const ref JSONValue item) { return ("path" in item["parentReference"]) != null; } private bool isMalware(const ref JSONValue item) { return ("malware" in item) != null; } private bool hasFileSize(const ref JSONValue item) { return ("size" in item) != null; } private bool hasId(const ref JSONValue item) { return ("id" in item) != null; } private bool hasHashes(const ref JSONValue item) { return ("hashes" in item["file"]) != null; } private bool hasQuickXorHash(const ref JSONValue item) { return ("quickXorHash" in item["file"]["hashes"]) != null; } private bool hasSha1Hash(const ref JSONValue item) { return ("sha1Hash" in item["file"]["hashes"]) != null; } private bool isDotFile(string path) { // always allow the root if (path == ".") return false; path = buildNormalizedPath(path); auto paths = pathSplitter(path); foreach(base; paths) { if (startsWith(base, ".")){ return true; } } return false; } // construct an Item struct from a JSON driveItem private Item makeItem(const ref JSONValue driveItem) { Item item = { id: driveItem["id"].str, name: "name" in driveItem ? driveItem["name"].str : null, // name may be missing for deleted files in OneDrive Biz eTag: "eTag" in driveItem ? driveItem["eTag"].str : null, // eTag is not returned for the root in OneDrive Biz cTag: "cTag" in driveItem ? 
driveItem["cTag"].str : null, // cTag is missing in old files (and all folders in OneDrive Biz) }; // OneDrive API Change: https://github.com/OneDrive/onedrive-api-docs/issues/834 // OneDrive no longer returns lastModifiedDateTime if the item is deleted by OneDrive if(isItemDeleted(driveItem)){ // Set mtime to SysTime(0) item.mtime = SysTime(0); } else { // Item is not in a deleted state // Resolve 'Key not found: fileSystemInfo' when then item is a remote item // https://github.com/abraunegg/onedrive/issues/11 if (isItemRemote(driveItem)) { item.mtime = SysTime.fromISOExtString(driveItem["remoteItem"]["fileSystemInfo"]["lastModifiedDateTime"].str); } else { item.mtime = SysTime.fromISOExtString(driveItem["fileSystemInfo"]["lastModifiedDateTime"].str); } } if (isItemFile(driveItem)) { item.type = ItemType.file; } else if (isItemFolder(driveItem)) { item.type = ItemType.dir; } else if (isItemRemote(driveItem)) { item.type = ItemType.remote; } else { // do not throw exception, item will be removed in applyDifferences() } // root and remote items do not have parentReference if (!isItemRoot(driveItem) && ("parentReference" in driveItem) != null) { item.driveId = driveItem["parentReference"]["driveId"].str; if (hasParentReferenceId(driveItem)) { item.parentId = driveItem["parentReference"]["id"].str; } } // extract the file hash if (isItemFile(driveItem) && ("hashes" in driveItem["file"])) { if ("crc32Hash" in driveItem["file"]["hashes"]) { item.crc32Hash = driveItem["file"]["hashes"]["crc32Hash"].str; } else if ("sha1Hash" in driveItem["file"]["hashes"]) { item.sha1Hash = driveItem["file"]["hashes"]["sha1Hash"].str; } else if ("quickXorHash" in driveItem["file"]["hashes"]) { item.quickXorHash = driveItem["file"]["hashes"]["quickXorHash"].str; } else { log.vlog("The file does not have any hash"); } } if (isItemRemote(driveItem)) { item.remoteDriveId = driveItem["remoteItem"]["parentReference"]["driveId"].str; item.remoteId = driveItem["remoteItem"]["id"].str; } return item; } private bool testFileHash(string path, const ref Item item) { if (item.crc32Hash) { if (item.crc32Hash == computeCrc32(path)) return true; } else if (item.sha1Hash) { if (item.sha1Hash == computeSha1Hash(path)) return true; } else if (item.quickXorHash) { if (item.quickXorHash == computeQuickXorHash(path)) return true; } return false; } class SyncException: Exception { @nogc @safe pure nothrow this(string msg, string file = __FILE__, size_t line = __LINE__) { super(msg, file, line); } } final class SyncEngine { private Config cfg; private OneDriveApi onedrive; private ItemDatabase itemdb; private UploadSession session; private SelectiveSync selectiveSync; // list of items to skip while applying the changes private string[] skippedItems; // list of items to delete after the changes has been downloaded private string[2][] idsToDelete; // list of items we fake created when running --dry-run private string[2][] idsFaked; // default drive id private string defaultDriveId; // default root id private string defaultRootId; // type of OneDrive account private string accountType; // free space remaining at init() private long remainingFreeSpace; // file size limit for a new file private long newSizeLimit; // is file malware flag private bool malwareDetected = false; // download filesystem issue flag private bool downloadFailed = false; // upload failure - OneDrive or filesystem issue (reading data) private bool uploadFailed = false; // initialization has been done private bool initDone = false; // sync engine dryRun flag private 
bool dryRun = false; // quota details available private bool quotaAvailable = true; // sync business shared folders flag private bool syncBusinessFolders = false; // single directory scope flag private bool singleDirectoryScope = false; // sync_list new folder added, trigger delta scan override private bool syncListFullScanTrigger = false; this(Config cfg, OneDriveApi onedrive, ItemDatabase itemdb, SelectiveSync selectiveSync) { assert(onedrive && itemdb && selectiveSync); this.cfg = cfg; this.onedrive = onedrive; this.itemdb = itemdb; this.selectiveSync = selectiveSync; // session = UploadSession(onedrive, cfg.uploadStateFilePath); this.dryRun = cfg.getValueBool("dry_run"); this.newSizeLimit = cfg.getValueLong("skip_size") * 2^^20; this.newSizeLimit = (this.newSizeLimit == 0) ? long.max : this.newSizeLimit; } void reset() { initDone=false; } void init() { // Set accountType, defaultDriveId, defaultRootId & remainingFreeSpace once and reuse where possible JSONValue oneDriveDetails; JSONValue oneDriveRootDetails; if (initDone) { return; } session = UploadSession(onedrive, cfg.uploadStateFilePath); // Need to catch 400 or 5xx server side errors at initialization // Get Default Drive try { oneDriveDetails = onedrive.getDefaultDrive(); } catch (OneDriveException e) { if (e.httpStatusCode == 400) { // OneDrive responded with 400 error: Bad Request log.error("\nERROR: OneDrive returned a 'HTTP 400 Bad Request' - Cannot Initialize Sync Engine"); // Check this if (cfg.getValueString("drive_id").length) { log.error("ERROR: Check your 'drive_id' entry in your configuration file as it may be incorrect\n"); } // Must exit here exit(-1); } if (e.httpStatusCode == 401) { // HTTP request returned status code 401 (Unauthorized) log.error("\nERROR: OneDrive returned a 'HTTP 401 Unauthorized' - Cannot Initialize Sync Engine"); log.error("ERROR: Check your configuration as your access token may be empty or invalid\n"); // Must exit here exit(-1); } if (e.httpStatusCode >= 500) { // There was a HTTP 5xx Server Side Error log.error("ERROR: OneDrive returned a 'HTTP 5xx Server Side Error' - Cannot Initialize Sync Engine"); // Must exit here exit(-1); } } // Get Default Root try { oneDriveRootDetails = onedrive.getDefaultRoot(); } catch (OneDriveException e) { if (e.httpStatusCode == 400) { // OneDrive responded with 400 error: Bad Request log.error("\nERROR: OneDrive returned a 'HTTP 400 Bad Request' - Cannot Initialize Sync Engine"); // Check this if (cfg.getValueString("drive_id").length) { log.error("ERROR: Check your 'drive_id' entry in your configuration file as it may be incorrect\n"); } // Must exit here exit(-1); } if (e.httpStatusCode == 401) { // HTTP request returned status code 401 (Unauthorized) log.error("\nERROR: OneDrive returned a 'HTTP 401 Unauthorized' - Cannot Initialize Sync Engine"); log.error("ERROR: Check your configuration as your access token may be empty or invalid\n"); // Must exit here exit(-1); } if (e.httpStatusCode >= 500) { // There was a HTTP 5xx Server Side Error log.error("ERROR: OneDrive returned a 'HTTP 5xx Server Side Error' - Cannot Initialize Sync Engine"); // Must exit here exit(-1); } } if ((oneDriveDetails.type() == JSONType.object) && (oneDriveRootDetails.type() == JSONType.object) && (hasId(oneDriveDetails)) && (hasId(oneDriveRootDetails))) { // JSON elements are valid // Debug OneDrive Account details response log.vdebug("OneDrive Account Details: ", oneDriveDetails); log.vdebug("OneDrive Account Root Details: ", oneDriveRootDetails); // Successfully got details 
from OneDrive without a server side error such as 'HTTP/1.1 500 Internal Server Error' or 'HTTP/1.1 504 Gateway Timeout' accountType = oneDriveDetails["driveType"].str; defaultDriveId = oneDriveDetails["id"].str; defaultRootId = oneDriveRootDetails["id"].str; remainingFreeSpace = oneDriveDetails["quota"]["remaining"].integer; // In some cases OneDrive Business configurations 'restrict' quota details thus is empty / blank / negative value / zero if (remainingFreeSpace <= 0) { // quota details not available log.error("ERROR: OneDrive quota information is being restricted. Please fix by speaking to your OneDrive / Office 365 Administrator."); log.error("ERROR: Flagging to disable upload space checks - this MAY have undesirable results if a file cannot be uploaded due to out of space."); quotaAvailable = false; } // Display accountType, defaultDriveId, defaultRootId & remainingFreeSpace for verbose logging purposes log.vlog("Application version: ", strip(import("version"))); log.vlog("Account Type: ", accountType); log.vlog("Default Drive ID: ", defaultDriveId); log.vlog("Default Root ID: ", defaultRootId); log.vlog("Remaining Free Space: ", remainingFreeSpace); // If account type is documentLibrary - then most likely this is a SharePoint repository // and files 'may' be modified after upload. See: https://github.com/abraunegg/onedrive/issues/205 if(accountType == "documentLibrary") { setDisableUploadValidation(); } // Check the local database to ensure the OneDrive Root details are in the database checkDatabaseForOneDriveRoot(); // Check if there is an interrupted upload session if (session.restore()) { log.log("Continuing the upload session ..."); auto item = session.upload(); saveItem(item); } initDone = true; } else { // init failure initDone = false; // log why log.error("ERROR: Unable to query OneDrive to initialize application"); // Must exit here exit(-1); } } // Configure noRemoteDelete if function is called // By default, noRemoteDelete = false; // Meaning we will process local deletes to delete item on OneDrive void setNoRemoteDelete() { noRemoteDelete = true; } // Configure uploadOnly if function is called // By default, uploadOnly = false; void setUploadOnly() { uploadOnly = true; } // Configure singleDirectoryScope if function is called // By default, singleDirectoryScope = false void setSingleDirectoryScope() { singleDirectoryScope = true; } // Configure disableUploadValidation if function is called // By default, disableUploadValidation = false; // Meaning we will always validate our uploads // However, when uploading a file that can contain metadata SharePoint will associate some // metadata from the library the file is uploaded to directly in the file // which breaks this validation. 
See https://github.com/abraunegg/onedrive/issues/205 void setDisableUploadValidation() { disableUploadValidation = true; log.vdebug("documentLibrary account type - flagging to disable upload validation checks due to Microsoft SharePoint file modification enrichments"); } // Issue #658 Handling // If an existing folder is moved into a sync_list valid path (where it previously was out of scope due to sync_list), // then set this flag to true, so that on the second 'true-up' sync, we force a rescan of the OneDrive path to capture any 'files' void setSyncListFullScanTrigger() { syncListFullScanTrigger = true; log.vdebug("Setting syncListFullScanTrigger = true due to new folder creation request in a location that is in-scope via sync_list"); } // unset method void unsetSyncListFullScanTrigger() { syncListFullScanTrigger = false; log.vdebug("Setting syncListFullScanTrigger = false"); } // download all new changes from OneDrive void applyDifferences(bool performFullItemScan) { // Set defaults for the root folder // Use the global's as initialised via init() rather than performing unnecessary additional HTTPS calls string driveId = defaultDriveId; string rootId = defaultRootId; applyDifferences(driveId, rootId, performFullItemScan); // Check OneDrive Personal Shared Folders // https://github.com/OneDrive/onedrive-api-docs/issues/764 Item[] items = itemdb.selectRemoteItems(); foreach (item; items) { log.vdebug("------------------------------------------------------------------"); log.vlog("Syncing OneDrive Shared Folder: ", item.name); applyDifferences(item.remoteDriveId, item.remoteId, performFullItemScan); } } // download all new changes from a specified folder on OneDrive void applyDifferencesSingleDirectory(string path) { log.vlog("Getting path details from OneDrive ..."); JSONValue onedrivePathDetails; // test if the path we are going to sync from actually exists on OneDrive try { onedrivePathDetails = onedrive.getPathDetails(path); // Returns a JSON String for the OneDrive Path } catch (OneDriveException e) { if (e.httpStatusCode == 404) { // The directory was not found log.error("ERROR: The requested single directory to sync was not found on OneDrive"); return; } if (e.httpStatusCode >= 500) { // OneDrive returned a 'HTTP 5xx Server Side Error' - gracefully handling error - error message already logged return; } } // OK - the path on OneDrive should exist, get the driveId and rootId for this folder // Was the response a valid JSON Object? if (onedrivePathDetails.type() == JSONType.object) { string driveId; string folderId; if(isItemRemote(onedrivePathDetails)){ // 2 step approach: // 1. Ensure changes for the root remote path are captured // 2. 
Download changes specific to the remote path // root remote applyDifferences(defaultDriveId, onedrivePathDetails["id"].str, false); // remote changes driveId = onedrivePathDetails["remoteItem"]["parentReference"]["driveId"].str; // Should give something like 66d53be8a5056eca folderId = onedrivePathDetails["remoteItem"]["id"].str; // Should give something like BC7D88EC1F539DCF!107 // Apply any differences found on OneDrive for this path (download data) applyDifferences(driveId, folderId, false); } else { // use the item id as folderId folderId = onedrivePathDetails["id"].str; // Should give something like 12345ABCDE1234A1!101 // Apply any differences found on OneDrive for this path (download data) applyDifferences(defaultDriveId, folderId, false); } } else { // Log that an invalid JSON object was returned log.vdebug("onedrive.getPathDetails call returned an invalid JSON Object"); } } // make sure the OneDrive root is in our database auto checkDatabaseForOneDriveRoot() { log.vlog("Fetching details for OneDrive Root"); JSONValue rootPathDetails = onedrive.getDefaultRoot(); // Returns a JSON Value // validate object is a JSON value if (rootPathDetails.type() == JSONType.object) { // valid JSON object Item rootPathItem = makeItem(rootPathDetails); // configure driveId and rootId for the OneDrive Root // Set defaults for the root folder string driveId = rootPathDetails["parentReference"]["driveId"].str; // Should give something like 12345abcde1234a1 string rootId = rootPathDetails["id"].str; // Should give something like 12345ABCDE1234A1!101 // Query the database if (!itemdb.selectById(driveId, rootId, rootPathItem)) { log.vlog("OneDrive Root does not exist in the database. We need to add it."); applyDifference(rootPathDetails, driveId, true); log.vlog("Added OneDrive Root to the local database"); } else { log.vlog("OneDrive Root exists in the database"); } } else { // Log that an invalid JSON object was returned log.error("ERROR: Unable to query OneDrive for account details"); log.vdebug("onedrive.getDefaultRoot call returned an invalid JSON Object"); // Must exit here as we cant configure our required variables exit(-1); } } // create a directory on OneDrive without syncing auto createDirectoryNoSync(string path) { // Attempt to create the requested path within OneDrive without performing a sync log.vlog("Attempting to create the requested path within OneDrive"); // Handle the remote folder creation and updating of the local database without performing a sync uploadCreateDir(path); } // delete a directory on OneDrive without syncing auto deleteDirectoryNoSync(string path) { // Use the global's as initialised via init() rather than performing unnecessary additional HTTPS calls const(char)[] rootId = defaultRootId; // Attempt to delete the requested path within OneDrive without performing a sync log.vlog("Attempting to delete the requested path within OneDrive"); // test if the path we are going to exists on OneDrive try { onedrive.getPathDetails(path); } catch (OneDriveException e) { if (e.httpStatusCode == 404) { // The directory was not found on OneDrive - no need to delete it log.vlog("The requested directory to delete was not found on OneDrive - skipping removing the remote directory as it doesn't exist"); return; } if (e.httpStatusCode >= 500) { // OneDrive returned a 'HTTP 5xx Server Side Error' - gracefully handling error - error message already logged return; } } Item item; if (!itemdb.selectByPath(path, defaultDriveId, item)) { // this is odd .. 
this directory is not in the local database - just go delete it log.vlog("The requested directory to delete was not found in the local database - pushing delete request direct to OneDrive"); uploadDeleteItem(item, path); } else { // the folder was in the local database // Handle the deletion and saving any update to the local database log.vlog("The requested directory to delete was found in the local database. Processing the deletion normally"); deleteByPath(path); } } // rename a directory on OneDrive without syncing auto renameDirectoryNoSync(string source, string destination) { try { // test if the local path exists on OneDrive onedrive.getPathDetails(source); } catch (OneDriveException e) { if (e.httpStatusCode == 404) { // The directory was not found log.vlog("The requested directory to rename was not found on OneDrive"); return; } if (e.httpStatusCode >= 500) { // OneDrive returned a 'HTTP 5xx Server Side Error' - gracefully handling error - error message already logged return; } } // The OneDrive API returned a 200 OK status, so the folder exists // Rename the requested directory on OneDrive without performing a sync moveByPath(source, destination); } // download the new changes of a specific item // id is the root of the drive or a shared folder private void applyDifferences(string driveId, const(char)[] id, bool performFullItemScan) { log.vlog("Applying changes of Path ID: " ~ id); JSONValue changes; JSONValue changesAvailable; // Query the name of this folder id string syncFolderName; string syncFolderPath; string syncFolderChildPath; JSONValue idDetails = parseJSON("{}"); try { idDetails = onedrive.getPathDetailsById(driveId, id); } catch (OneDriveException e) { if (e.httpStatusCode == 404) { // id was not found - possibly a remote (shared) folder log.vlog("No details returned for given Path ID"); return; } if (e.httpStatusCode >= 500) { // OneDrive returned a 'HTTP 5xx Server Side Error' - gracefully handling error - error message already logged return; } } // validate that idDetails is a JSON value if (idDetails.type() == JSONType.object) { // Get the name of this 'Path ID' if (("id" in idDetails) != null) { // valid response from onedrive.getPathDetailsById(driveId, id) - a JSON item object present if ((idDetails["id"].str == id) && (!isItemFile(idDetails))){ // Is a Folder or Remote Folder syncFolderName = idDetails["name"].str; } // Debug output of path details as queried from OneDrive log.vdebug("OneDrive Path Details: ", idDetails); // OneDrive Personal Folder Item Reference (24/4/2019) // "@odata.context": "https://graph.microsoft.com/v1.0/$metadata#drives('66d53be8a5056eca')/items/$entity", // "cTag": "adDo2NkQ1M0JFOEE1MDU2RUNBITEwMS42MzY5MTY5NjQ1ODcwNzAwMDA", // "eTag": "aNjZENTNCRThBNTA1NkVDQSExMDEuMQ", // "fileSystemInfo": { // "createdDateTime": "2018-06-06T20:45:24.436Z", // "lastModifiedDateTime": "2019-04-24T07:09:31.29Z" // }, // "folder": { // "childCount": 3, // "view": { // "sortBy": "takenOrCreatedDateTime", // "sortOrder": "ascending", // "viewType": "thumbnails" // } // }, // "id": "66D53BE8A5056ECA!101", // "name": "root", // "parentReference": { // "driveId": "66d53be8a5056eca", // "driveType": "personal" // }, // "root": {}, // "size": 0 // OneDrive Personal Remote / Shared Folder Item Reference (4/9/2019) // "@odata.context": "https://graph.microsoft.com/v1.0/$metadata#drives('driveId')/items/$entity", // "cTag": "cTag", // "eTag": "eTag", // "id": "itemId", // "name": "shared", // "parentReference": { // "driveId": "driveId", // "driveType": 
"personal", // "id": "parentItemId", // "path": "/drive/root:" // }, // "remoteItem": { // "fileSystemInfo": { // "createdDateTime": "2019-01-14T18:54:43.2666667Z", // "lastModifiedDateTime": "2019-04-24T03:47:22.53Z" // }, // "folder": { // "childCount": 0, // "view": { // "sortBy": "takenOrCreatedDateTime", // "sortOrder": "ascending", // "viewType": "thumbnails" // } // }, // "id": "remoteItemId", // "parentReference": { // "driveId": "remoteDriveId", // "driveType": "personal" // "id": "id", // "name": "name", // "path": "/drives//items/:/" // }, // "size": 0, // "webUrl": "webUrl" // } // OneDrive Business Folder & Shared Folder Item Reference (24/4/2019) // "@odata.context": "https://graph.microsoft.com/v1.0/$metadata#drives('driveId')/items/$entity", // "@odata.etag": "\"{eTag},1\"", // "cTag": "\"c:{cTag},0\"", // "eTag": "\"{eTag},1\"", // "fileSystemInfo": { // "createdDateTime": "2019-04-17T04:00:43Z", // "lastModifiedDateTime": "2019-04-17T04:00:43Z" // }, // "folder": { // "childCount": 2 // }, // "id": "itemId", // "name": "shared_folder", // "parentReference": { // "driveId": "parentDriveId", // "driveType": "business", // "id": "parentId", // "path": "/drives/driveId/root:" // }, // "size": 0 // To evaluate a change received from OneDrive, this must be set correctly if (hasParentReferencePath(idDetails)) { // Path from OneDrive has a parentReference we can use log.vdebug("Item details returned contains parent reference path - potentially shared folder object"); syncFolderPath = idDetails["parentReference"]["path"].str; syncFolderChildPath = syncFolderPath ~ "/" ~ idDetails["name"].str ~ "/"; } else { // No parentReference, set these to blank log.vdebug("Item details returned no parent reference path"); syncFolderPath = ""; syncFolderChildPath = ""; } // Debug Output log.vdebug("Sync Folder Name: ", syncFolderName); log.vdebug("Sync Folder Parent Path: ", syncFolderPath); log.vdebug("Sync Folder Actual Path: ", syncFolderChildPath); } } else { // Log that an invalid JSON object was returned log.vdebug("onedrive.getPathDetailsById call returned an invalid JSON Object"); } // Issue #658 // If we are using a sync_list file, using deltaLink will actually 'miss' changes (moves & deletes) on OneDrive as using sync_list discards changes // Use the performFullItemScan boolean to control whether we perform a full object scan of use the delta link for the root folder // When using --synchronize the normal process order is: // 1. Scan OneDrive for changes // 2. Scan local folder for changes // 3. Scan OneDrive for changes // When using sync_list and performing a full scan, what this means is a full scan is performed twice, which leads to massive processing & time overheads // Control this via performFullItemScan // Get the current delta link string deltaLink = ""; string deltaLinkAvailable = itemdb.getDeltaLink(driveId, id); if (!performFullItemScan){ // performFullItemScan == false // use delta link deltaLink = deltaLinkAvailable; } for (;;) { // Due to differences in OneDrive API's between personal and business we need to get changes only from defaultRootId // If we used the 'id' passed in & when using --single-directory with a business account we get: // 'HTTP request returned status code 501 (Not Implemented): view.delta can only be called on the root.' 
// To view changes correctly, we need to use the correct path id for the request const(char)[] idToQuery; if (driveId == defaultDriveId) { // The drive id matches our users default drive id idToQuery = defaultRootId.dup; } else { // The drive id does not match our users default drive id // Potentially the 'path id' we are requesting the details of is a Shared Folder (remote item) // Use the 'id' that was passed in (folderId) idToQuery = id; } try { // Fetch the changes relative to the path id we want to query changes = onedrive.viewChangesById(driveId, idToQuery, deltaLink); changesAvailable = onedrive.viewChangesById(driveId, idToQuery, deltaLinkAvailable); } catch (OneDriveException e) { // OneDrive threw an error log.vdebug("OneDrive threw an error when querying for these changes:"); log.vdebug("driveId: ", driveId); log.vdebug("idToQuery: ", idToQuery); log.vdebug("deltaLink: ", deltaLink); // HTTP request returned status code 404 (Not Found) if (e.httpStatusCode == 404) { // Stop application log.log("\n\nOneDrive returned a 'HTTP 404 - Item not found'"); log.log("The item id to query was not found on OneDrive"); log.log("\nRemove your '", cfg.databaseFilePath, "' file and try to sync again\n"); return; } // HTTP request returned status code 410 (The requested resource is no longer available at the server) if (e.httpStatusCode == 410) { log.vlog("Delta link expired, re-syncing..."); deltaLink = null; continue; } // HTTP request returned status code 500 (Internal Server Error) if (e.httpStatusCode == 500) { // display what the error is displayOneDriveErrorMessage(e.msg); return; } if (e.httpStatusCode == 504) { // HTTP request returned status code 504 (Gateway Timeout) // Retry by calling applyDifferences() again log.vlog("OneDrive returned a 'HTTP 504 - Gateway Timeout' - gracefully handling error"); applyDifferences(driveId, idToQuery, performFullItemScan); } else { // Default operation if not 404, 410, 500, 504 errors // display what the error is displayOneDriveErrorMessage(e.msg); log.log("\nRemove your '", cfg.databaseFilePath, "' file and try to sync again\n"); return; } } // is changesAvailable a valid JSON response long deltaChanges = 0; if (changesAvailable.type() == JSONType.object) { // are there any delta changes? if (("value" in changesAvailable) != null) { deltaChanges = count(changesAvailable["value"].array); } } // is changes a valid JSON response if (changes.type() == JSONType.object) { // Are there any changes to process? if ((("value" in changes) != null) && ((deltaChanges > 0) || (syncListFullScanTrigger))) { auto nrChanges = count(changes["value"].array); auto changeCount = 0; if (!performFullItemScan){ // Display the number of changes we are processing // OneDrive ships 'changes' in ~200 bundles. These messages then get displayed for each bundle if (nrChanges >= cfg.getValueLong("min_notify_changes")) { // verbose log, no 'notify' .. it is over the top log.vlog("Processing ", nrChanges, " changes"); } else { // There are valid changes log.vdebug("Number of changes from OneDrive to process: ", nrChanges); } } else { // Do not display anything unless we are doing a verbose debug as due to #658 we are essentially doing a --resync each time when using sync_list // Display the number of items we are processing if (nrChanges >= cfg.getValueLong("min_notify_changes")) { // verbose log, no 'notify' .. 
it is over the top log.vlog("Processing ", nrChanges, " OneDrive items to ensure consistent state due to sync_list being used"); } else { // There are valid changes log.vdebug("Number of items from OneDrive to process: ", nrChanges); } // unset now the full scan trigger if set if (syncListFullScanTrigger) { unsetSyncListFullScanTrigger(); } } foreach (item; changes["value"].array) { bool isRoot = false; string thisItemPath; changeCount++; // Change as reported by OneDrive log.vdebug("------------------------------------------------------------------"); log.vdebug("Processing change ", changeCount, " of ", nrChanges); log.vdebug("OneDrive Change: ", item); // Deleted items returned from onedrive.viewChangesByItemId or onedrive.viewChangesByDriveId (/delta) do not have a 'name' attribute // Thus we cannot name check for 'root' below on deleted items if(!isItemDeleted(item)){ // This is not a deleted item log.vdebug("Not a OneDrive deleted item change"); // Test is this is the OneDrive Users Root? // Debug output of change evaluation items log.vdebug("defaultRootId = ", defaultRootId); log.vdebug("'search id' = ", id); log.vdebug("id == defaultRootId = ", (id == defaultRootId)); log.vdebug("isItemRoot(item) = ", (isItemRoot(item))); log.vdebug("item['name'].str == 'root' = ", (item["name"].str == "root")); log.vdebug("singleDirectoryScope = ", (singleDirectoryScope)); // Use the global's as initialised via init() rather than performing unnecessary additional HTTPS calls // In a --single-directory scenario however, '(id == defaultRootId) = false' for root items if ( ((id == defaultRootId) || (singleDirectoryScope)) && (isItemRoot(item)) && (item["name"].str == "root")) { // This IS a OneDrive Root item log.vdebug("Change will flagged as a 'root' item change"); isRoot = true; } } // How do we handle this change? if (isRoot || !hasParentReferenceId(item) || isItemDeleted(item)){ // Is a root item, has no id in parentReference or is a OneDrive deleted item log.vdebug("isRoot = ", isRoot); log.vdebug("!hasParentReferenceId(item) = ", (!hasParentReferenceId(item))); log.vdebug("isItemDeleted(item) = ", (isItemDeleted(item))); log.vdebug("Handling change as 'root item', has no parent reference or is a deleted item"); applyDifference(item, driveId, isRoot); } else { // What is this item's path? 
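// use parentReference.path when OneDrive supplies it; otherwise treat the path as empty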
if (hasParentReferencePath(item)) { thisItemPath = item["parentReference"]["path"].str; } else { thisItemPath = ""; } // Business Shared Folders special case handling bool sharedFoldersSpecialCase = false; // Debug output of change evaluation items log.vdebug("'parentReference id' = ", item["parentReference"]["id"].str); log.vdebug("syncFolderName = ", syncFolderName); log.vdebug("syncFolderPath = ", syncFolderPath); log.vdebug("syncFolderChildPath = ", syncFolderChildPath); log.vdebug("thisItemId = ", item["id"].str); log.vdebug("thisItemPath = ", thisItemPath); log.vdebug("'item id' matches search 'id' = ", (item["id"].str == id)); log.vdebug("'parentReference id' matches search 'id' = ", (item["parentReference"]["id"].str == id)); log.vdebug("'thisItemPath' contains 'syncFolderChildPath' = ", (canFind(thisItemPath, syncFolderChildPath)) ); log.vdebug("'thisItemPath' contains search 'id' = ", (canFind(thisItemPath, id)) ); // Special case handling // - IF we are syncing shared folders, and the shared folder is not the 'top level' folder being shared out // canFind(thisItemPath, syncFolderChildPath) will never match: // Syncing this OneDrive Business Shared Folder: MyFolderName // OneDrive Business Shared By: Firstname Lastname (email@address) // Applying changes of Path ID: pathId // [DEBUG] Sync Folder Name: MyFolderName // [DEBUG] Sync Folder Path: /drives/driveId/root:/TopLevel/ABCD // [DEBUG] Sync Folder Child Path: /drives/driveId/root:/TopLevel/ABCD/MyFolderName/ // ... // [DEBUG] 'item id' matches search 'id' = false // [DEBUG] 'parentReference id' matches search 'id' = false // [DEBUG] 'thisItemPath' contains 'syncFolderChildPath' = false // [DEBUG] 'thisItemPath' contains search 'id' = false // [DEBUG] Change does not match any criteria to apply // Remote change discarded - not in business shared folders sync scope if ((!canFind(thisItemPath, syncFolderChildPath)) && (syncBusinessFolders)) { // Syncing Shared Business folders & we dont have a path match // is this a reverse path match? log.vdebug("'thisItemPath' contains 'syncFolderName' = ", (canFind(thisItemPath, syncFolderName)) ); if (canFind(thisItemPath, syncFolderName)) { sharedFoldersSpecialCase = true; } } // Check this item's path to see if this is a change on the path we want: // 1. 'item id' matches 'id' // 2. 'parentReference id' matches 'id' // 3. 'item path' contains 'syncFolderChildPath' // 4. 'item path' contains 'id' if ( (item["id"].str == id) || (item["parentReference"]["id"].str == id) || (canFind(thisItemPath, syncFolderChildPath)) || (canFind(thisItemPath, id)) || (sharedFoldersSpecialCase) ){ // This is a change we want to apply if (!sharedFoldersSpecialCase) { log.vdebug("Change matches search criteria to apply"); } else { log.vdebug("Change matches search criteria to apply - special case criteria - reverse path matching used"); } // Apply OneDrive change applyDifference(item, driveId, isRoot); } else { // No item ID match or folder sync match log.vdebug("Change does not match any criteria to apply"); // Before discarding change - does this ID still exist on OneDrive - as in IS this // potentially a --single-directory sync and the user 'moved' the file out of the 'sync-dir' to another OneDrive folder // This is a corner edge case - https://github.com/skilion/onedrive/issues/341 JSONValue oneDriveMovedNotDeleted; try { oneDriveMovedNotDeleted = onedrive.getPathDetailsById(driveId, item["id"].str); } catch (OneDriveException e) { if (e.httpStatusCode == 404) { // No .. 
that ID is GONE log.vlog("Remote change discarded - item cannot be found"); return; } if (e.httpStatusCode >= 500) { // OneDrive returned a 'HTTP 5xx Server Side Error' - gracefully handling error - error message already logged return; } } // Yes .. ID is still on OneDrive but elsewhere .... #341 edge case handling // What is the original local path for this ID in the database? Does it match 'syncFolderChildPath' if (itemdb.idInLocalDatabase(driveId, item["id"].str)){ // item is in the database string originalLocalPath = itemdb.computePath(driveId, item["id"].str); if (canFind(originalLocalPath, syncFolderChildPath)){ // This 'change' relates to an item that WAS in 'syncFolderChildPath' but is now // stored elsewhere on OneDrive - outside the path we are syncing from // Remove this item locally as it's local path is now obsolete idsToDelete ~= [driveId, item["id"].str]; } else { // out of scope for some other reason if (singleDirectoryScope){ log.vlog("Remote change discarded - not in --single-directory sync scope"); } else { log.vlog("Remote change discarded - not in sync scope"); } log.vdebug("Remote change discarded: ", item); } } else { // item is not in the database if (singleDirectoryScope){ // We are syncing a single directory, so this is the reason why it is out of scope log.vlog("Remote change discarded - not in --single-directory sync scope"); log.vdebug("Remote change discarded: ", item); } else { // Not a single directory sync if (syncBusinessFolders) { // if we are syncing shared business folders, a 'change' may be out of scope as we are not syncing that 'folder' // but we are sent all changes from the 'parent root' as we cannot query the 'delta' for this folder // as that is a 501 error - not implemented log.vlog("Remote change discarded - not in business shared folders sync scope"); log.vdebug("Remote change discarded: ", item); } else { // out of scope for some other reason log.vlog("Remote change discarded - not in sync scope"); log.vdebug("Remote change discarded: ", item); } } } } } } } else { // No changes reported on OneDrive log.vdebug("OneDrive Reported no delta changes - Local path and OneDrive in-sync"); } // the response may contain either @odata.deltaLink or @odata.nextLink if ("@odata.deltaLink" in changes) deltaLink = changes["@odata.deltaLink"].str; if (deltaLink) itemdb.setDeltaLink(driveId, id, deltaLink); if ("@odata.nextLink" in changes) deltaLink = changes["@odata.nextLink"].str; else break; } else { // Log that an invalid JSON object was returned if ((driveId == defaultDriveId) || (!syncBusinessFolders)) { log.vdebug("onedrive.viewChangesByItemId call returned an invalid JSON Object"); } else { log.vdebug("onedrive.viewChangesByDriveId call returned an invalid JSON Object"); } } } // delete items in idsToDelete if (idsToDelete.length > 0) deleteItems(); // empty the skipped items skippedItems.length = 0; assumeSafeAppend(skippedItems); } // process the change of a single DriveItem private void applyDifference(JSONValue driveItem, string driveId, bool isRoot) { // Format the OneDrive change into a consumable object for the database Item item = makeItem(driveItem); // Reset the malwareDetected flag for this item malwareDetected = false; // Reset the downloadFailed flag for this item downloadFailed = false; if(isItemDeleted(driveItem)){ // Change is to delete an item log.vdebug("Remote deleted item"); } else { // Is the change from OneDrive a 'root' item // The change should be considered a 'root' item if: // 1. Contains a ["root"] element // 2. 
Has no ["parentReference"]["id"] ... #323 & #324 highlighted that this is false as some 'root' shared objects now can have an 'id' element .. OneDrive API change // 2. Has no ["parentReference"]["path"] // 3. Was detected by an input flag as to be handled as a root item regardless of actual status if (isItemRoot(driveItem) || !hasParentReferencePath(driveItem) || isRoot) { log.vdebug("Handing a OneDrive 'root' change"); item.parentId = null; // ensures that it has no parent item.driveId = driveId; // HACK: makeItem() cannot set the driveId property of the root log.vdebug("Update/Insert local database with item details"); itemdb.upsert(item); log.vdebug("item details: ", item); return; } } bool unwanted; // Check if the parent id is something we need to skip if (skippedItems.find(item.parentId).length != 0) { // Potentially need to flag as unwanted log.vdebug("Flagging as unwanted: find(item.parentId).length != 0"); unwanted = true; // Is this item id in the database? if (itemdb.idInLocalDatabase(item.driveId, item.id)){ // item exists in database, most likely moved out of scope for current client configuration log.vdebug("This item was previously synced / seen by the client"); if (("name" in driveItem["parentReference"]) != null) { // How is this out of scope? if (selectiveSync.isPathExcludedViaSyncList(driveItem["parentReference"]["name"].str)) { // Previously synced item is now out of scope as it has been moved out of what is included in sync_list log.vdebug("This previously synced item is now excluded from being synced due to sync_list exclusion"); } // flag to delete local file as it now is no longer in sync with OneDrive log.vdebug("Flagging to delete item locally"); idsToDelete ~= [item.driveId, item.id]; } } } // Check if this is excluded by config option: skip_dir if (!unwanted) { // Only check path if config is != "" if (cfg.getValueString("skip_dir") != "") { // Is the item a folder and not a deleted item? if ((isItemFolder(driveItem)) && (!isItemDeleted(driveItem))) { // work out the 'snippet' path where this folder would be created string simplePathToCheck = ""; string complexPathToCheck = ""; string matchDisplay = ""; if (hasParentReference(driveItem)) { // we need to workout the FULL path for this item string parentDriveId = driveItem["parentReference"]["driveId"].str; string parentItem = driveItem["parentReference"]["id"].str; // simple path if (("name" in driveItem["parentReference"]) != null) { simplePathToCheck = driveItem["parentReference"]["name"].str ~ "/" ~ driveItem["name"].str; } else { simplePathToCheck = driveItem["name"].str; } // complex path complexPathToCheck = itemdb.computePath(parentDriveId, parentItem) ~ "/" ~ driveItem["name"].str; complexPathToCheck = buildNormalizedPath(complexPathToCheck); log.vdebug("skip_dir path to check (simple): ", simplePathToCheck); log.vdebug("skip_dir path to check (complex): ", complexPathToCheck); } else { simplePathToCheck = driveItem["name"].str; } // OK .. what checks are we doing? 
if ((simplePathToCheck != "") && (complexPathToCheck == "")) { // just a simple check log.vdebug("Performing a simple check only"); unwanted = selectiveSync.isDirNameExcluded(simplePathToCheck); } else { // simple and complex log.vdebug("Performing a simple & complex path match if required"); // simple first unwanted = selectiveSync.isDirNameExcluded(simplePathToCheck); matchDisplay = simplePathToCheck; if (!unwanted) { log.vdebug("Simple match was false, attempting complex match"); // simple didnt match, perform a complex check unwanted = selectiveSync.isDirNameExcluded(complexPathToCheck); matchDisplay = complexPathToCheck; } } log.vdebug("Result: ", unwanted); if (unwanted) log.vlog("Skipping item - excluded by skip_dir config match: ", matchDisplay); } } } // Check if this is excluded by config option: skip_file if (!unwanted) { // Is the item a file and not a deleted item? if ((isItemFile(driveItem)) && (!isItemDeleted(driveItem))) { log.vdebug("skip_file item to check: ", item.name); unwanted = selectiveSync.isFileNameExcluded(item.name); log.vdebug("Result: ", unwanted); if (unwanted) log.vlog("Skipping item - excluded by skip_file config: ", item.name); } } // check the item type if (!unwanted) { if (isItemFile(driveItem)) { log.vdebug("The item we are syncing is a file"); } else if (isItemFolder(driveItem)) { log.vdebug("The item we are syncing is a folder"); } else if (isItemRemote(driveItem)) { log.vdebug("The item we are syncing is a remote item"); assert(isItemFolder(driveItem["remoteItem"]), "The remote item is not a folder"); } else { log.vlog("This item type (", item.name, ") is not supported"); unwanted = true; log.vdebug("Flagging as unwanted: item type is not supported"); } } // check for selective sync string path; if (!unwanted) { // Is the item parent in the local database? if (itemdb.idInLocalDatabase(item.driveId, item.parentId)){ // compute the item path to see if the path is excluded path = itemdb.computePath(item.driveId, item.parentId) ~ "/" ~ item.name; path = buildNormalizedPath(path); if (selectiveSync.isPathExcludedViaSyncList(path)) { // selective sync advised to skip, however is this a file and are we configured to upload / download files in the root? if ((isItemFile(driveItem)) && (cfg.getValueBool("sync_root_files")) && (rootName(path) == "") ) { // This is a file // We are configured to sync all files in the root // This is a file in the logical root unwanted = false; } else { // path is unwanted unwanted = true; log.vlog("Skipping item - excluded by sync_list config: ", path); // flagging to skip this file now, but does this exist in the DB thus needs to be removed / deleted? if (itemdb.idInLocalDatabase(item.driveId, item.id)){ log.vlog("Flagging item for local delete as item exists in database: ", path); // flag to delete idsToDelete ~= [item.driveId, item.id]; } } } } else { // Parent not in the database // Is the parent a 'folder' from another user? ie - is this a 'shared folder' that has been shared with us? 
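// a matching driveId means the parent should already be in the local database, so the item is flagged unwanted; a different driveId indicates a shared folder from another account, which is stored without a parent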
if (defaultDriveId == item.driveId){ // Flagging as unwanted log.vdebug("Flagging as unwanted: item.driveId (", item.driveId,"), item.parentId (", item.parentId,") not in local database"); unwanted = true; } else { // Edge case as the parent (from another users OneDrive account) will never be in the database log.vdebug("Parent not in database but appears to be a shared folder: item.driveId (", item.driveId,"), item.parentId (", item.parentId,") not in local database"); item.parentId = null; // ensures that it has no parent log.vdebug("Update/Insert local database with item details"); itemdb.upsert(item); log.vdebug("item details: ", item); return; } } } // skip downloading dot files if configured if (cfg.getValueBool("skip_dotfiles")) { if (isDotFile(path)) { log.vlog("Skipping item - .file or .folder: ", path); unwanted = true; } } // skip unwanted items early if (unwanted) { log.vdebug("Skipping OneDrive change as this is determined to be unwanted"); skippedItems ~= item.id; return; } // check if the item has been seen before Item oldItem; bool cached = itemdb.selectById(item.driveId, item.id, oldItem); // check if the item is going to be deleted if (isItemDeleted(driveItem)) { // item.name is not available, so we get a bunch of meaningless log output // Item name we will attempt to delete will be printed out later if (cached) { // flag to delete log.vdebug("Flagging item for deletion: ", item); idsToDelete ~= [item.driveId, item.id]; } else { // flag to ignore log.vdebug("Flagging item to skip: ", item); skippedItems ~= item.id; } return; } // rename the local item if it is unsynced and there is a new version of it on OneDrive string oldPath; if (cached && item.eTag != oldItem.eTag) { // Is the item in the local database if (itemdb.idInLocalDatabase(item.driveId, item.id)){ oldPath = itemdb.computePath(item.driveId, item.id); if (!isItemSynced(oldItem, oldPath)) { if (exists(oldPath)) { // Is the local file technically 'newer' based on UTC timestamp? 
SysTime localModifiedTime = timeLastModified(oldPath).toUTC(); localModifiedTime.fracSecs = Duration.zero; item.mtime.fracSecs = Duration.zero; if (localModifiedTime > item.mtime) { // local file is newer than item on OneDrive // no local rename // no download needed log.vlog("Local item modified time is newer based on UTC time conversion - keeping local item"); log.vdebug("Skipping OneDrive change as this is determined to be unwanted due to local item modified time being newer than OneDrive item"); skippedItems ~= item.id; return; } else { // remote file is newer than local item log.vlog("Remote item modified time is newer based on UTC time conversion"); auto ext = extension(oldPath); auto newPath = path.chomp(ext) ~ "-" ~ deviceName ~ ext; log.vlog("The local item is out-of-sync with OneDrive, renaming to preserve existing file: ", oldPath, " -> ", newPath); if (!dryRun) { safeRename(oldPath); } else { // Expectation here is that there is a new file locally (newPath) however as we don't create this, the "new file" will not be uploaded as it does not exist log.vdebug("DRY-RUN: Skipping local file rename"); } } } cached = false; } } } // update the item if (cached) { log.vdebug("OneDrive change is an update to an existing local item"); applyChangedItem(oldItem, oldPath, item, path); } else { log.vdebug("OneDrive change is a new local item"); // Check if file should be skipped based on size limit if (isItemFile(driveItem)) { if (cfg.getValueLong("skip_size") != 0) { if (driveItem["size"].integer >= this.newSizeLimit) { log.vlog("Skipping item - excluded by skip_size config: ", item.name, " (", driveItem["size"].integer/2^^20, " MB)"); return; } } } applyNewItem(item, path); } if ((malwareDetected == false) && (downloadFailed == false)){ // save the item in the db // if the file was detected as malware and NOT downloaded, we dont want to falsify the DB as downloading it as otherwise the next pass will think it was deleted, thus delete the remote item // Likewise if the download failed, we dont want to falsify the DB as downloading it as otherwise the next pass will think it was deleted, thus delete the remote item if (cached) { log.vdebug("Updating local database with item details"); itemdb.update(item); } else { log.vdebug("Inserting item details to local database"); itemdb.insert(item); } // What was the item that was saved log.vdebug("item details: ", item); } } // download an item that was not synced before private void applyNewItem(Item item, string path) { if (exists(path)) { if (isItemSynced(item, path)) { //log.vlog("The item is already present"); return; } else { // TODO: force remote sync by deleting local item // Is the local file technically 'newer' based on UTC timestamp? 
SysTime localModifiedTime = timeLastModified(path).toUTC(); localModifiedTime.fracSecs = Duration.zero; item.mtime.fracSecs = Duration.zero; if (localModifiedTime > item.mtime) { // local file is newer than item on OneDrive // no local rename // no download needed log.vlog("Local item modified time is newer based on UTC time conversion - keeping local item"); log.vdebug("Skipping OneDrive change as this is determined to be unwanted due to local item modified time being newer than OneDrive item"); return; } else { // remote file is newer than local item log.vlog("Remote item modified time is newer based on UTC time conversion"); auto ext = extension(path); auto newPath = path.chomp(ext) ~ "-" ~ deviceName ~ ext; log.vlog("The local item is out-of-sync with OneDrive, renaming to preserve existing file: ", path, " -> ", newPath); if (!dryRun) { safeRename(path); } else { // Expectation here is that there is a new file locally (newPath) however as we don't create this, the "new file" will not be uploaded as it does not exist log.vdebug("DRY-RUN: Skipping local file rename"); } } } } final switch (item.type) { case ItemType.file: downloadFileItem(item, path); if (dryRun) { // we dont download the file, but we need to track that we 'faked it' idsFaked ~= [item.driveId, item.id]; } break; case ItemType.dir: case ItemType.remote: log.log("Creating directory: ", path); // Issue #658 handling auto syncListExcluded = selectiveSync.isPathExcludedViaSyncList(path); log.vdebug("sync_list excluded: ", syncListExcluded); if (!syncListExcluded) { // path we are creating is not excluded via sync_list setSyncListFullScanTrigger(); } if (!dryRun) { mkdirRecurse(path); } else { // we dont create the directory, but we need to track that we 'faked it' idsFaked ~= [item.driveId, item.id]; } break; } } // update a local item // the local item is assumed to be in sync with the local db private void applyChangedItem(Item oldItem, string oldPath, Item newItem, string newPath) { assert(oldItem.driveId == newItem.driveId); assert(oldItem.id == newItem.id); assert(oldItem.type == newItem.type); assert(oldItem.remoteDriveId == newItem.remoteDriveId); assert(oldItem.remoteId == newItem.remoteId); if (oldItem.eTag != newItem.eTag) { // handle changed name/path if (oldPath != newPath) { log.log("Moving ", oldPath, " to ", newPath); if (exists(newPath)) { Item localNewItem; if (itemdb.selectByPath(newPath, defaultDriveId, localNewItem)) { if (isItemSynced(localNewItem, newPath)) { log.vlog("Destination is in sync and will be overwritten"); } else { // TODO: force remote sync by deleting local item log.vlog("The destination is occupied, renaming the conflicting file..."); safeRename(newPath); } } else { // to be overwritten item is not already in the itemdb, so it should // be synced. Do a safe rename here, too. 
// TODO: force remote sync by deleting local item log.vlog("The destination is occupied by new file, renaming the conflicting file..."); safeRename(newPath); } } rename(oldPath, newPath); } // handle changed content and mtime // HACK: use mtime+hash instead of cTag because of https://github.com/OneDrive/onedrive-api-docs/issues/765 if (newItem.type == ItemType.file && oldItem.mtime != newItem.mtime && !testFileHash(newPath, newItem)) { downloadFileItem(newItem, newPath); } // handle changed time if (newItem.type == ItemType.file && oldItem.mtime != newItem.mtime) { try { setTimes(newPath, newItem.mtime, newItem.mtime); } catch (FileException e) { // display the error message displayFileSystemErrorMessage(e.msg); } } } } // downloads a File resource private void downloadFileItem(Item item, string path) { assert(item.type == ItemType.file); write("Downloading file ", path, " ... "); JSONValue fileDetails; try { fileDetails = onedrive.getFileDetails(item.driveId, item.id); } catch (OneDriveException e) { log.error("ERROR: Query of OneDrive for file details failed"); if (e.httpStatusCode >= 500) { // OneDrive returned a 'HTTP 5xx Server Side Error' - gracefully handling error - error message already logged downloadFailed = true; return; } } // fileDetails has to be a valid JSON object if (fileDetails.type() == JSONType.object){ if (isMalware(fileDetails)){ // OneDrive reports that this file is malware log.error("ERROR: MALWARE DETECTED IN FILE - DOWNLOAD SKIPPED"); // set global flag malwareDetected = true; return; } } else { // Issue #550 handling log.error("ERROR: Query of OneDrive for file details failed"); log.vdebug("onedrive.getFileDetails call returned an invalid JSON Object"); // We want to return, cant download downloadFailed = true; return; } if (!dryRun) { ulong fileSize = 0; string OneDriveFileHash; // fileDetails should be a valid JSON due to prior check if (hasFileSize(fileDetails)) { // Use the configured filesize as reported by OneDrive fileSize = fileDetails["size"].integer; } else { // filesize missing log.vdebug("WARNING: fileDetails['size'] is missing"); } if (hasHashes(fileDetails)) { // File details returned hash details // QuickXorHash if (hasQuickXorHash(fileDetails)) { // Use the configured quickXorHash as reported by OneDrive if (fileDetails["file"]["hashes"]["quickXorHash"].str != "") { OneDriveFileHash = fileDetails["file"]["hashes"]["quickXorHash"].str; } } // Check for Sha1Hash if (hasSha1Hash(fileDetails)) { // Use the configured sha1Hash as reported by OneDrive if (fileDetails["file"]["hashes"]["sha1Hash"].str != "") { OneDriveFileHash = fileDetails["file"]["hashes"]["sha1Hash"].str; } } } else { // file hash data missing log.vdebug("WARNING: fileDetails['file']['hashes'] is missing - unable to compare file hash after download"); } try { onedrive.downloadById(item.driveId, item.id, path, fileSize); } catch (OneDriveException e) { if ((e.httpStatusCode == 429) || (e.httpStatusCode == 408)) { // HTTP request returned status code 429 (Too Many Requests) // https://github.com/abraunegg/onedrive/issues/133 // or 408 request timeout // https://github.com/abraunegg/onedrive/issues/694 // Back off & retry with incremental delay int retryCount = 10; int retryAttempts = 1; int backoffInterval = 2; while (retryAttempts < retryCount){ Thread.sleep(dur!"seconds"(retryAttempts*backoffInterval)); try { onedrive.downloadById(item.driveId, item.id, path, fileSize); // successful download retryAttempts = retryCount; } catch (OneDriveException e) { if ((e.httpStatusCode == 429) || 
(e.httpStatusCode == 408)) { // Increment & loop around retryAttempts++; } } } } } catch (std.exception.ErrnoException e) { // There was a file system error // display the error message displayFileSystemErrorMessage(e.msg); downloadFailed = true; return; } // file has to have downloaded in order to set the times / data for the file if (exists(path)) { // A 'file' was downloaded - does what we downloaded = reported fileSize or if there is some sort of funky local disk compression going on // does the file hash OneDrive reports match what we have locally? string quickXorHash = computeQuickXorHash(path); string sha1Hash = computeSha1Hash(path); if ((getSize(path) == fileSize) || (OneDriveFileHash == quickXorHash) || (OneDriveFileHash == sha1Hash)) { // downloaded matches either size or hash log.vdebug("Downloaded file matches reported size and or reported file hash"); try { setTimes(path, item.mtime, item.mtime); } catch (FileException e) { // display the error message displayFileSystemErrorMessage(e.msg); } } else { // size error? if (getSize(path) != fileSize) { // downloaded file size does not match log.error("ERROR: File download size mis-match. Increase logging verbosity to determine why."); } // hash error? if ((OneDriveFileHash != quickXorHash) || (OneDriveFileHash != sha1Hash)) { // downloaded file hash does not match log.error("ERROR: File download hash mis-match. Increase logging verbosity to determine why."); } // we do not want this local file to remain on the local file system safeRemove(path); downloadFailed = true; return; } } else { log.error("ERROR: File failed to download. Increase logging verbosity to determine why."); downloadFailed = true; return; } } if (!downloadFailed) { writeln("done."); log.fileOnly("Downloading file ", path, " ... 
done."); } } // returns true if the given item corresponds to the local one private bool isItemSynced(Item item, string path) { if (!exists(path)) return false; final switch (item.type) { case ItemType.file: if (isFile(path)) { SysTime localModifiedTime = timeLastModified(path).toUTC(); // HACK: reduce time resolution to seconds before comparing item.mtime.fracSecs = Duration.zero; localModifiedTime.fracSecs = Duration.zero; if (localModifiedTime == item.mtime) { return true; } else { log.vlog("The local item has a different modified time ", localModifiedTime, " remote is ", item.mtime); } if (testFileHash(path, item)) { return true; } else { log.vlog("The local item has a different hash"); } } else { log.vlog("The local item is a directory but should be a file"); } break; case ItemType.dir: case ItemType.remote: if (isDir(path)) { return true; } else { log.vlog("The local item is a file but should be a directory"); } break; } return false; } private void deleteItems() { foreach_reverse (i; idsToDelete) { Item item; if (!itemdb.selectById(i[0], i[1], item)) continue; // check if the item is in the db string path = itemdb.computePath(i[0], i[1]); log.log("Trying to delete item ", path); if (!dryRun) { // Actually process the database entry removal itemdb.deleteById(item.driveId, item.id); if (item.remoteDriveId != null) { // delete the linked remote folder itemdb.deleteById(item.remoteDriveId, item.remoteId); } } bool needsRemoval = false; if (exists(path)) { // path exists on the local system // make sure that the path refers to the correct item Item pathItem; if (itemdb.selectByPath(path, item.driveId, pathItem)) { if (pathItem.id == item.id) { needsRemoval = true; } else { log.log("Skipped due to id difference!"); } } else { // item has disappeared completely needsRemoval = true; } } if (needsRemoval) { log.log("Deleting item ", path); if (!dryRun) { if (isFile(path)) { remove(path); } else { try { // Remove any children of this path if they still exist // Resolve 'Directory not empty' error when deleting local files foreach (DirEntry child; dirEntries(path, SpanMode.depth, false)) { attrIsDir(child.linkAttributes) ? rmdir(child.name) : remove(child.name); } // Remove the path now that it is empty of children rmdirRecurse(path); } catch (FileException e) { // display the error message displayFileSystemErrorMessage(e.msg); } } } } } if (!dryRun) { // clean up idsToDelete idsToDelete.length = 0; assumeSafeAppend(idsToDelete); } } // scan the given directory for differences and new items void scanForDifferences(string path) { // scan for changes log.vlog("Uploading differences of ", path); Item item; if (itemdb.selectByPath(path, defaultDriveId, item)) { uploadDifferences(item); } log.vlog("Uploading new items of ", path); uploadNewItems(path); // clean up idsToDelete only if --dry-run is set if (dryRun) { idsToDelete.length = 0; assumeSafeAppend(idsToDelete); } } private void uploadDifferences(Item item) { // see if this item.id we were supposed to have deleted // match early and return if (dryRun) { foreach (i; idsToDelete) { if (i[1] == item.id) { return; } } } log.vlog("Processing ", item.name); bool unwanted = false; string path; // Is the path excluded? unwanted = selectiveSync.isDirNameExcluded(item.name); // If the path is not excluded, is the filename excluded? if (!unwanted) { unwanted = selectiveSync.isFileNameExcluded(item.name); } // If path or filename does not exclude, is this excluded due to use of selective sync? 
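// (the item's full path is computed from the local database entry (driveId + id) and then
// checked against the user's 'sync_list' configuration in the block below)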
	if (!unwanted) {
		path = itemdb.computePath(item.driveId, item.id);
		unwanted = selectiveSync.isPathExcludedViaSyncList(path);
	}
	// skip unwanted items
	if (unwanted) {
		//log.vlog("Filtered out");
		return;
	}
	// Restrictions and limitations around Windows file naming
	if (!isValidName(path)) {
		log.vlog("Skipping item - invalid name (Microsoft Naming Convention): ", path);
		return;
	}
	// Check for bad whitespace items
	if (!containsBadWhiteSpace(path)) {
		log.vlog("Skipping item - invalid name (Contains an invalid whitespace item): ", path);
		return;
	}
	// Check for HTML ASCII Codes as part of file name
	if (!containsASCIIHTMLCodes(path)) {
		log.vlog("Skipping item - invalid name (Contains HTML ASCII Code): ", path);
		return;
	}
	final switch (item.type) {
	case ItemType.dir:
		uploadDirDifferences(item, path);
		break;
	case ItemType.file:
		uploadFileDifferences(item, path);
		break;
	case ItemType.remote:
		uploadRemoteDirDifferences(item, path);
		break;
	}
}

private void uploadDirDifferences(Item item, string path) {
	assert(item.type == ItemType.dir);
	if (exists(path)) {
		if (!isDir(path)) {
			log.vlog("The item was a directory but now it is a file");
			uploadDeleteItem(item, path);
			uploadNewFile(path);
		} else {
			log.vlog("The directory has not changed");
			// loop through the children
			foreach (Item child; itemdb.selectChildren(item.driveId, item.id)) {
				uploadDifferences(child);
			}
		}
	} else {
		// Directory does not exist locally
		// If we are in a --dry-run situation - this directory may never have existed as we never downloaded it
		if (!dryRun) {
			log.vlog("The directory has been deleted locally");
			if (noRemoteDelete) {
				// do not process remote directory delete
				log.vlog("Skipping remote directory delete as --upload-only & --no-remote-delete configured");
			} else {
				uploadDeleteItem(item, path);
			}
		} else {
			// We are in a --dry-run situation - the directory appears to have been deleted locally, but it may never have existed as we never downloaded it
			// Check if path does not exist in database
			if (!itemdb.selectByPath(path, defaultDriveId, item)) {
				// Path not found in database
				log.vlog("The directory has been deleted locally");
				if (noRemoteDelete) {
					// do not process remote directory delete
					log.vlog("Skipping remote directory delete as --upload-only & --no-remote-delete configured");
				} else {
					uploadDeleteItem(item, path);
				}
			} else {
				// Path was found in the database
				// Did we 'fake create it' as part of --dry-run ?
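				// (idsFaked holds the ids of items that were only 'created' in the --dry-run copy
				// of the database and never written to disk)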
foreach (i; idsFaked) { if (i[1] == item.id) { log.vdebug("Matched faked dir which is 'supposed' to exist but not created due to --dry-run use"); log.vlog("The directory has not changed"); return; } } // item.id did not match a 'faked' download new directory creation log.vlog("The directory has been deleted locally"); uploadDeleteItem(item, path); } } } } private void uploadRemoteDirDifferences(Item item, string path) { assert(item.type == ItemType.remote); if (exists(path)) { if (!isDir(path)) { log.vlog("The item was a directory but now it is a file"); uploadDeleteItem(item, path); uploadNewFile(path); } else { log.vlog("The directory has not changed"); // continue through the linked folder assert(item.remoteDriveId && item.remoteId); Item remoteItem; bool found = itemdb.selectById(item.remoteDriveId, item.remoteId, remoteItem); if(found){ // item was found in the database uploadDifferences(remoteItem); } } } else { log.vlog("The directory has been deleted"); uploadDeleteItem(item, path); } } // upload local file system differences to OneDrive private void uploadFileDifferences(Item item, string path) { // Reset upload failure - OneDrive or filesystem issue (reading data) uploadFailed = false; assert(item.type == ItemType.file); if (exists(path)) { if (isFile(path)) { SysTime localModifiedTime = timeLastModified(path).toUTC(); // HACK: reduce time resolution to seconds before comparing item.mtime.fracSecs = Duration.zero; localModifiedTime.fracSecs = Duration.zero; if (localModifiedTime != item.mtime) { log.vlog("The file last modified time has changed"); string eTag = item.eTag; if (!testFileHash(path, item)) { log.vlog("The file content has changed"); write("Uploading modified file ", path, " ... "); JSONValue response; if (!dryRun) { // Are we using OneDrive Personal or OneDrive Business? // To solve 'Multiple versions of file shown on website after single upload' (https://github.com/abraunegg/onedrive/issues/2) // check what 'account type' this is as this issue only affects OneDrive Business so we need some extra logic here if (accountType == "personal"){ // Original file upload logic if (getSize(path) <= thresholdFileSize) { try { response = onedrive.simpleUploadReplace(path, item.driveId, item.id, item.eTag); } catch (OneDriveException e) { if (e.httpStatusCode == 401) { // OneDrive returned a 'HTTP/1.1 401 Unauthorized Error' - file failed to be uploaded writeln("skipped."); log.vlog("OneDrive returned a 'HTTP 401 - Unauthorized' - gracefully handling error"); uploadFailed = true; return; } if (e.httpStatusCode == 404) { // HTTP request returned status code 404 - the eTag provided does not exist // Delete record from the local database - file will be uploaded as a new file writeln("skipped."); log.vlog("OneDrive returned a 'HTTP 404 - eTag Issue' - gracefully handling error"); itemdb.deleteById(item.driveId, item.id); uploadFailed = true; return; } // Resolve https://github.com/abraunegg/onedrive/issues/36 if ((e.httpStatusCode == 409) || (e.httpStatusCode == 423)) { // The file is currently checked out or locked for editing by another user // We cant upload this file at this time writeln("skipped."); log.fileOnly("Uploading modified file ", path, " ... 
skipped."); write("", path, " is currently checked out or locked for editing by another user."); log.fileOnly(path, " is currently checked out or locked for editing by another user."); uploadFailed = true; return; } if (e.httpStatusCode == 412) { // HTTP request returned status code 412 - ETag does not match current item's value // Delete record from the local database - file will be uploaded as a new file writeln("skipped."); log.vdebug("Simple Upload Replace Failed - OneDrive eTag / cTag match issue"); log.vlog("OneDrive returned a 'HTTP 412 - Precondition Failed' - gracefully handling error. Will upload as new file."); itemdb.deleteById(item.driveId, item.id); uploadFailed = true; return; } if (e.httpStatusCode == 504) { // HTTP request returned status code 504 (Gateway Timeout) // Try upload as a session response = session.upload(path, item.driveId, item.parentId, baseName(path), item.eTag); } else { // display what the error is writeln("skipped."); displayOneDriveErrorMessage(e.msg); uploadFailed = true; return; } } catch (FileException e) { // display the error message writeln("skipped."); displayFileSystemErrorMessage(e.msg); uploadFailed = true; return; } // upload done without error writeln("done."); } else { writeln(""); try { response = session.upload(path, item.driveId, item.parentId, baseName(path), item.eTag); } catch (OneDriveException e) { if (e.httpStatusCode == 401) { // OneDrive returned a 'HTTP/1.1 401 Unauthorized Error' - file failed to be uploaded writeln("skipped."); log.vlog("OneDrive returned a 'HTTP 401 - Unauthorized' - gracefully handling error"); uploadFailed = true; return; } if (e.httpStatusCode == 412) { // HTTP request returned status code 412 - ETag does not match current item's value // Delete record from the local database - file will be uploaded as a new file writeln("skipped."); log.vdebug("Simple Upload Replace Failed - OneDrive eTag / cTag match issue"); log.vlog("OneDrive returned a 'HTTP 412 - Precondition Failed' - gracefully handling error. Will upload as new file."); itemdb.deleteById(item.driveId, item.id); uploadFailed = true; return; } else { // display what the error is writeln("skipped."); displayOneDriveErrorMessage(e.msg); uploadFailed = true; return; } } catch (FileException e) { // display the error message writeln("skipped."); displayFileSystemErrorMessage(e.msg); uploadFailed = true; return; } // upload done without error writeln("done."); } } else { // OneDrive Business Account // We need to always use a session to upload, but handle the changed file correctly if (accountType == "business"){ // For logging consistency writeln(""); try { response = session.upload(path, item.driveId, item.parentId, baseName(path), item.eTag); } catch (OneDriveException e) { if (e.httpStatusCode == 401) { // OneDrive returned a 'HTTP/1.1 401 Unauthorized Error' - file failed to be uploaded writeln("skipped."); log.vlog("OneDrive returned a 'HTTP 401 - Unauthorized' - gracefully handling error"); uploadFailed = true; return; } // Resolve https://github.com/abraunegg/onedrive/issues/36 if ((e.httpStatusCode == 409) || (e.httpStatusCode == 423)) { // The file is currently checked out or locked for editing by another user // We cant upload this file at this time writeln("skipped."); log.fileOnly("Uploading modified file ", path, " ... 
skipped."); writeln("", path, " is currently checked out or locked for editing by another user."); log.fileOnly(path, " is currently checked out or locked for editing by another user."); uploadFailed = true; return; } else { // display what the error is writeln("skipped."); displayOneDriveErrorMessage(e.msg); uploadFailed = true; return; } } catch (FileException e) { // display the error message writeln("skipped."); displayFileSystemErrorMessage(e.msg); uploadFailed = true; return; } // upload done without error writeln("done."); // As the session.upload includes the last modified time, save the response // Is the response a valid JSON object - validation checking done in saveItem saveItem(response); } // OneDrive documentLibrary if (accountType == "documentLibrary"){ // Handle certain file types differently if ((extension(path) == ".txt") || (extension(path) == ".csv")) { // .txt and .csv are unaffected by https://github.com/OneDrive/onedrive-api-docs/issues/935 // For logging consistency writeln(""); try { response = session.upload(path, item.driveId, item.parentId, baseName(path), item.eTag); } catch (OneDriveException e) { if (e.httpStatusCode == 401) { // OneDrive returned a 'HTTP/1.1 401 Unauthorized Error' - file failed to be uploaded writeln("skipped."); log.vlog("OneDrive returned a 'HTTP 401 - Unauthorized' - gracefully handling error"); uploadFailed = true; return; } // Resolve https://github.com/abraunegg/onedrive/issues/36 if ((e.httpStatusCode == 409) || (e.httpStatusCode == 423)) { // The file is currently checked out or locked for editing by another user // We cant upload this file at this time writeln("skipped."); log.fileOnly("Uploading modified file ", path, " ... skipped."); writeln("", path, " is currently checked out or locked for editing by another user."); log.fileOnly(path, " is currently checked out or locked for editing by another user."); uploadFailed = true; return; } else { // display what the error is writeln("skipped."); displayOneDriveErrorMessage(e.msg); uploadFailed = true; return; } } catch (FileException e) { // display the error message writeln("skipped."); displayFileSystemErrorMessage(e.msg); uploadFailed = true; return; } // upload done without error writeln("done."); // As the session.upload includes the last modified time, save the response // Is the response a valid JSON object - validation checking done in saveItem saveItem(response); } else { // Due to https://github.com/OneDrive/onedrive-api-docs/issues/935 Microsoft modifies all PDF, MS Office & HTML files with added XML content. It is a 'feature' of SharePoint. // This means, as a session upload, on 'completion' the file is 'moved' and generates a 404 ...... writeln("skipped."); log.fileOnly("Uploading modified file ", path, " ... skipped."); log.vlog("Skip Reason: Microsoft Sharepoint 'enrichment' after upload issue"); log.vlog("See: https://github.com/OneDrive/onedrive-api-docs/issues/935 for further details"); // Delete record from the local database - file will be uploaded as a new file itemdb.deleteById(item.driveId, item.id); return; } } } log.fileOnly("Uploading modified file ", path, " ... done."); if ("cTag" in response) { // use the cTag instead of the eTag because OneDrive may update the metadata of files AFTER they have been uploaded via simple upload eTag = response["cTag"].str; } else { // Is there an eTag in the response? 
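// (if neither a cTag nor an eTag is present in the response, eTag is left as an empty string
// for the later modified-time update)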
if ("eTag" in response) { // use the eTag from the response as there was no cTag eTag = response["eTag"].str; } else { // no tag available - set to nothing eTag = ""; } } } else { // we are --dry-run - simulate the file upload writeln("done."); response = createFakeResponse(path); // Log action to log file log.fileOnly("Uploading modified file ", path, " ... done."); // Is the response a valid JSON object - validation checking done in saveItem saveItem(response); return; } } if (accountType == "personal"){ // If Personal, call to update the modified time as stored on OneDrive if (!dryRun) { uploadLastModifiedTime(item.driveId, item.id, eTag, localModifiedTime.toUTC()); } } } else { log.vlog("The file has not changed"); } } else { log.vlog("The item was a file but now is a directory"); uploadDeleteItem(item, path); uploadCreateDir(path); } } else { // File does not exist locally // If we are in a --dry-run situation - this file may never have existed as we never downloaded it if (!dryRun) { // Not --dry-run situation log.vlog("The file has been deleted locally"); if (noRemoteDelete) { // do not process remote file delete log.vlog("Skipping remote file delete as --upload-only & --no-remote-delete configured"); } else { uploadDeleteItem(item, path); } } else { // We are in a --dry-run situation, file appears to have deleted locally - this file may never have existed as we never downloaded it .. // Check if path does not exist in database if (!itemdb.selectByPath(path, defaultDriveId, item)) { // file not found in database log.vlog("The file has been deleted locally"); if (noRemoteDelete) { // do not process remote file delete log.vlog("Skipping remote file delete as --upload-only & --no-remote-delete configured"); } else { uploadDeleteItem(item, path); } } else { // file was found in the database // Did we 'fake create it' as part of --dry-run ? foreach (i; idsFaked) { if (i[1] == item.id) { log.vdebug("Matched faked file which is 'supposed' to exist but not created due to --dry-run use"); log.vlog("The file has not changed"); return; } } // item.id did not match a 'faked' download new file creation log.vlog("The file has been deleted locally"); if (noRemoteDelete) { // do not process remote file delete log.vlog("Skipping remote file delete as --upload-only & --no-remote-delete configured"); } else { uploadDeleteItem(item, path); } } } } } // upload new items to OneDrive private void uploadNewItems(string path) { // https://support.microsoft.com/en-us/help/3125202/restrictions-and-limitations-when-you-sync-files-and-folders // If the path is greater than allowed characters, then one drive will return a '400 - Bad Request' // Need to ensure that the URI is encoded before the check is made // 400 Character Limit for OneDrive Business / Office 365 // 430 Character Limit for OneDrive Personal auto maxPathLength = 0; import std.range : walkLength; import std.uni : byGrapheme; if (accountType == "business"){ // Business Account maxPathLength = 400; } else { // Personal Account maxPathLength = 430; } // A short lived file that has disappeared will cause an error - is the path valid? 
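// (for example, a temporary file that another process created and removed while the scan was running)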
if (!exists(path)) { log.log("Skipping item - has disappeared: ", path); return; } // Invalid UTF-8 sequence check // https://github.com/skilion/onedrive/issues/57 // https://github.com/abraunegg/onedrive/issues/487 if(!isValid(path)) { // Path is not valid according to https://dlang.org/phobos/std_encoding.html log.vlog("Skipping item - invalid character sequences: ", path); return; } if(path.byGrapheme.walkLength < maxPathLength){ // path is less than maxPathLength // skip dot files if configured if (cfg.getValueBool("skip_dotfiles")) { if (isDotFile(path)) { log.vlog("Skipping item - .file or .folder: ", path); return; } } // Do we need to check for .nosync? Only if --check-for-nosync was passed in if (cfg.getValueBool("check_nosync")) { if (exists(path ~ "/.nosync")) { log.vlog("Skipping item - .nosync found & --check-for-nosync enabled: ", path); return; } } if (isSymlink(path)) { // if config says so we skip all symlinked items if (cfg.getValueBool("skip_symlinks")) { log.vlog("Skipping item - skip symbolic links configured: ", path); return; } // skip unexisting symbolic links else if (!exists(readLink(path))) { log.vlog("Skipping item - invalid symbolic link: ", path); return; } } // Restriction and limitations about windows naming files if (!isValidName(path)) { log.vlog("Skipping item - invalid name (Microsoft Naming Convention): ", path); return; } // Check for bad whitespace items if (!containsBadWhiteSpace(path)) { log.vlog("Skipping item - invalid name (Contains an invalid whitespace item): ", path); return; } // Check for HTML ASCII Codes as part of file name if (!containsASCIIHTMLCodes(path)) { log.vlog("Skipping item - invalid name (Contains HTML ASCII Code): ", path); return; } // filter out user configured items to skip if (path != ".") { if (isDir(path)) { log.vdebug("Checking path: ", path); // Only check path if config is != "" if (cfg.getValueString("skip_dir") != "") { if (selectiveSync.isDirNameExcluded(strip(path,"./"))) { log.vlog("Skipping item - excluded by skip_dir config: ", path); return; } } } if (isFile(path)) { log.vdebug("Checking file: ", path); if (selectiveSync.isFileNameExcluded(strip(path,"./"))) { log.vlog("Skipping item - excluded by skip_file config: ", path); return; } } if (selectiveSync.isPathExcludedViaSyncList(path)) { if ((isFile(path)) && (cfg.getValueBool("sync_root_files")) && (rootName(strip(path,"./")) == "")) { log.vdebug("Not skipping path due to sync_root_files inclusion: ", path); } else { string userSyncList = cfg.configDirName ~ "/sync_list"; if (exists(userSyncList)){ // skipped most likely due to inclusion in sync_list log.vlog("Skipping item - excluded by sync_list config: ", path); return; } else { // skipped for some other reason log.vlog("Skipping item - path excluded by user config: ", path); return; } } } } // This item passed all the unwanted checks // We want to upload this new item if (isDir(path)) { Item item; if (!itemdb.selectByPath(path, defaultDriveId, item)) { uploadCreateDir(path); } // recursively traverse children // the above operation takes time and the directory might have // disappeared in the meantime if (!exists(path)) { log.vlog("Directory disappeared during upload: ", path); return; } // Try and access the directory and any path below try { auto entries = dirEntries(path, SpanMode.shallow, false); foreach (DirEntry entry; entries) { uploadNewItems(entry.name); } } catch (FileException e) { // display the error message displayFileSystemErrorMessage(e.msg); return; } } else { // This item is a file auto 
fileSize = getSize(path); // Can we upload this file - is there enough free space? - https://github.com/skilion/onedrive/issues/73 // However if the OneDrive account does not provide the quota details, we have no idea how much free space is available if ((!quotaAvailable) || ((remainingFreeSpace - fileSize) > 0)){ if (!quotaAvailable) { log.vlog("Ignoring OneDrive account quota details to upload file - this may fail if not enough space on OneDrive .."); } Item item; if (!itemdb.selectByPath(path, defaultDriveId, item)) { // item is not in the database, upload new file uploadNewFile(path); if (!uploadFailed) { // upload did not fail remainingFreeSpace = (remainingFreeSpace - fileSize); log.vlog("Remaining free space: ", remainingFreeSpace); } } } else { // Not enough free space log.log("Skipping item '", path, "' due to insufficient free space available on OneDrive"); } } } else { // This path was skipped - why? log.log("Skipping item '", path, "' due to the full path exceeding ", maxPathLength, " characters (Microsoft OneDrive limitation)"); } } // create new directory on OneDrive private void uploadCreateDir(const(string) path) { log.vlog("OneDrive Client requested to create remote path: ", path); JSONValue onedrivePathDetails; Item parent; // Was the path entered the root path? if (path != "."){ // If this is null or empty - we cant query the database properly if ((parent.driveId == "") && (parent.id == "")){ // What path to use? string parentPath = dirName(path); // will be either . or something else try { log.vdebug("Attempting to query OneDrive for this parent path: ", parentPath); onedrivePathDetails = onedrive.getPathDetails(parentPath); } catch (OneDriveException e) { // exception - set onedriveParentRootDetails to a blank valid JSON onedrivePathDetails = parseJSON("{}"); if (e.httpStatusCode == 404) { // Parent does not exist ... need to create parent log.vdebug("Parent path does not exist: ", parentPath); uploadCreateDir(parentPath); } if (e.httpStatusCode >= 500) { // OneDrive returned a 'HTTP 5xx Server Side Error' - gracefully handling error - error message already logged return; } } // configure the parent item data if (hasId(onedrivePathDetails) && hasParentReference(onedrivePathDetails)){ log.vdebug("Parent path found, configuring parent item"); parent.id = onedrivePathDetails["id"].str; // This item's ID. 
Should give something like 12345ABCDE1234A1!101 parent.driveId = onedrivePathDetails["parentReference"]["driveId"].str; // Should give something like 12345abcde1234a1 } else { // OneDrive API query failed // Assume client defaults log.vdebug("Parent path could not be queried, using OneDrive account defaults"); parent.id = defaultRootId; // Should give something like 12345ABCDE1234A1!101 parent.driveId = defaultDriveId; // Should give something like 12345abcde1234a1 } } JSONValue response; // test if the path we are going to create already exists on OneDrive try { log.vdebug("Attempting to query OneDrive for this path: ", path); response = onedrive.getPathDetails(path); } catch (OneDriveException e) { if (e.httpStatusCode == 404) { // The directory was not found log.vlog("The requested directory to create was not found on OneDrive - creating remote directory: ", path); if (!dryRun) { // Perform the database lookup enforce(itemdb.selectByPath(dirName(path), parent.driveId, parent), "The parent item id is not in the database"); JSONValue driveItem = [ "name": JSONValue(baseName(path)), "folder": parseJSON("{}") ]; // Submit the creation request // Fix for https://github.com/skilion/onedrive/issues/356 try { response = onedrive.createById(parent.driveId, parent.id, driveItem); } catch (OneDriveException e) { if (e.httpStatusCode == 409) { // OneDrive API returned a 404 (above) to say the directory did not exist // but when we attempted to create it, OneDrive responded that it now already exists log.vlog("OneDrive reported that ", path, " already exists .. OneDrive API race condition"); return; } else { // some other error from OneDrive was returned - display what it is log.error("OneDrive generated an error when creating this path: ", path); displayOneDriveErrorMessage(e.msg); return; } } // Is the response a valid JSON object - validation checking done in saveItem saveItem(response); } else { // Simulate a successful 'directory create' & save it to the dryRun database copy // The simulated response has to pass 'makeItem' as part of saveItem auto fakeResponse = createFakeResponse(path); saveItem(fakeResponse); } log.vlog("Successfully created the remote directory ", path, " on OneDrive"); return; } if (e.httpStatusCode >= 500) { // OneDrive returned a 'HTTP 5xx Server Side Error' - gracefully handling error - error message already logged return; } } // response from OneDrive has to be a valid JSON object if (response.type() == JSONType.object){ // https://docs.microsoft.com/en-us/windows/desktop/FileIO/naming-a-file // Do not assume case sensitivity. For example, consider the names OSCAR, Oscar, and oscar to be the same, // even though some file systems (such as a POSIX-compliant file system) may consider them as different. // Note that NTFS supports POSIX semantics for case sensitivity but this is not the default behavior. 
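// The comparison below uses D's case-sensitive string equality, so a name that only differs
// in case falls through to the 'case-insensitive match' error branch further down.
// For reference only, a case-insensitive check could be sketched as:
//   import std.uni : icmp;
//   bool sameNameIgnoringCase = (icmp(response["name"].str, baseName(path)) == 0);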
			if (response["name"].str == baseName(path)){
				// OneDrive 'name' matches local path name
				log.vlog("The requested directory to create was found on OneDrive - skipping creating the directory: ", path);
				// Check that this path is in the database
				if (!itemdb.selectById(parent.driveId, parent.id, parent)){
					// parent for 'path' is NOT in the database
					log.vlog("The parent for this path is not in the local database - need to add parent to local database");
					string parentPath = dirName(path);
					uploadCreateDir(parentPath);
				} else {
					// parent is in database
					log.vlog("The parent for this path is in the local database - adding requested path (", path, ") to database");
					auto res = onedrive.getPathDetails(path);
					// Is the response a valid JSON object - validation checking done in saveItem
					saveItem(res);
				}
			} else {
				// The names match when compared case-insensitively, but differ in case
				log.error("ERROR: Current directory has a 'case-insensitive match' to an existing directory on OneDrive");
				log.error("ERROR: To resolve, rename this local directory: ", absolutePath(path));
				log.error("ERROR: Remote OneDrive directory: ", response["name"].str);
				log.log("Skipping: ", absolutePath(path));
				return;
			}
		} else {
			// response is not valid JSON, an error was returned from OneDrive
			log.error("ERROR: There was an error performing this operation on OneDrive");
			log.error("ERROR: Increase logging verbosity to assist determining why.");
			log.log("Skipping: ", absolutePath(path));
			return;
		}
	}
}

// upload a new file to OneDrive
private void uploadNewFile(string path) {
	// Reset upload failure - OneDrive or filesystem issue (reading data)
	uploadFailed = false;
	Item parent;
	// Check the database for the parent
	//enforce(itemdb.selectByPath(dirName(path), defaultDriveId, parent), "The parent item is not in the local database");
	if ((dryRun) || (itemdb.selectByPath(dirName(path), defaultDriveId, parent))) {
		// Maximum file upload size
		// https://support.microsoft.com/en-au/help/3125202/restrictions-and-limitations-when-you-sync-files-and-folders
		// 1. OneDrive Business says 15GB
		// 2. Another article updated April 2018 says 20GB:
		// https://answers.microsoft.com/en-us/onedrive/forum/odoptions-oddesktop-sdwin10/personal-onedrive-file-upload-size-max/a3621fc9-b766-4a99-99f8-bcc01ccb025f
		// Use the smaller size for now
		auto maxUploadFileSize = 16106127360; // 15GB
		//auto maxUploadFileSize = 21474836480; // 20GB
		auto thisFileSize = getSize(path);
		// To avoid a 409 Conflict error - does the file actually exist on OneDrive already?
		JSONValue fileDetailsFromOneDrive;
		// Can we read the file - a permissions issue or file corruption will cause a failure
		// https://github.com/abraunegg/onedrive/issues/113
		if (readLocalFile(path)){
			// able to read the file
			if (thisFileSize <= maxUploadFileSize){
				// Resolves: https://github.com/skilion/onedrive/issues/121, https://github.com/skilion/onedrive/issues/294, https://github.com/skilion/onedrive/issues/329
				// Does this 'file' already exist on OneDrive?
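				// (a 404 from getPathDetails below means the file is not on OneDrive yet and a new
				// upload is performed; any other valid response is compared against the local copy further down)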
try { // test if the local path exists on OneDrive fileDetailsFromOneDrive = onedrive.getPathDetails(path); } catch (OneDriveException e) { if (e.httpStatusCode == 401) { // OneDrive returned a 'HTTP/1.1 401 Unauthorized Error' writeln("Skipping item - OneDrive returned a 'HTTP 401 - Unauthorized' when attempting to query if file exists"); log.vlog("OneDrive returned a 'HTTP 401 - Unauthorized' - gracefully handling error"); return; } if (e.httpStatusCode == 404) { // The file was not found on OneDrive, need to upload it // Check if file should be skipped based on skip_size config if (thisFileSize >= this.newSizeLimit) { writeln("Skipping item - excluded by skip_size config: ", path, " (", thisFileSize/2^^20," MB)"); return; } write("Uploading new file ", path, " ... "); JSONValue response; if (!dryRun) { // Resolve https://github.com/abraunegg/onedrive/issues/37 if (thisFileSize == 0){ // We can only upload zero size files via simpleFileUpload regardless of account type // https://github.com/OneDrive/onedrive-api-docs/issues/53 try { response = onedrive.simpleUpload(path, parent.driveId, parent.id, baseName(path)); } catch (OneDriveException e) { // error uploading file // display what the error is writeln("skipped."); displayOneDriveErrorMessage(e.msg); uploadFailed = true; return; } catch (FileException e) { // display the error message writeln("skipped."); displayFileSystemErrorMessage(e.msg); uploadFailed = true; return; } } else { // File is not a zero byte file // Are we using OneDrive Personal or OneDrive Business? // To solve 'Multiple versions of file shown on website after single upload' (https://github.com/abraunegg/onedrive/issues/2) // check what 'account type' this is as this issue only affects OneDrive Business so we need some extra logic here if (accountType == "personal"){ // Original file upload logic if (thisFileSize <= thresholdFileSize) { try { response = onedrive.simpleUpload(path, parent.driveId, parent.id, baseName(path)); } catch (OneDriveException e) { if (e.httpStatusCode == 401) { // OneDrive returned a 'HTTP/1.1 401 Unauthorized Error' - file failed to be uploaded writeln("skipped."); log.vlog("OneDrive returned a 'HTTP 401 - Unauthorized' - gracefully handling error"); uploadFailed = true; return; } if (e.httpStatusCode == 504) { // HTTP request returned status code 504 (Gateway Timeout) // Try upload as a session try { response = session.upload(path, parent.driveId, parent.id, baseName(path)); } catch (OneDriveException e) { // error uploading file // display what the error is writeln("skipped."); displayOneDriveErrorMessage(e.msg); uploadFailed = true; return; } } else { // display what the error is writeln("skipped."); displayOneDriveErrorMessage(e.msg); uploadFailed = true; return; } } catch (FileException e) { // display the error message writeln("skipped."); displayFileSystemErrorMessage(e.msg); uploadFailed = true; return; } } else { // File larger than threshold - use a session to upload writeln(""); try { response = session.upload(path, parent.driveId, parent.id, baseName(path)); } catch (OneDriveException e) { if (e.httpStatusCode == 401) { // OneDrive returned a 'HTTP/1.1 401 Unauthorized Error' - file failed to be uploaded writeln("skipped."); log.vlog("OneDrive returned a 'HTTP 401 - Unauthorized' - gracefully handling error"); uploadFailed = true; return; } else { // display what the error is writeln("skipped."); displayOneDriveErrorMessage(e.msg); uploadFailed = true; return; } } catch (FileException e) { // display the error message 
writeln("skipped."); displayFileSystemErrorMessage(e.msg); uploadFailed = true; return; } } } else { // OneDrive Business Account - always use a session to upload writeln(""); try { response = session.upload(path, parent.driveId, parent.id, baseName(path)); } catch (OneDriveException e) { if (e.httpStatusCode == 401) { // OneDrive returned a 'HTTP/1.1 401 Unauthorized Error' - file failed to be uploaded writeln("skipped."); log.vlog("OneDrive returned a 'HTTP 401 - Unauthorized' - gracefully handling error"); uploadFailed = true; return; } else { // display what the error is writeln("skipped."); displayOneDriveErrorMessage(e.msg); uploadFailed = true; return; } } catch (FileException e) { // display the error message writeln("skipped."); displayFileSystemErrorMessage(e.msg); uploadFailed = true; return; } } } // response from OneDrive has to be a valid JSON object if (response.type() == JSONType.object){ // upload done without error writeln("done."); // Log action to log file log.fileOnly("Uploading new file ", path, " ... done."); // The file was uploaded, or a 4xx / 5xx error was generated if ("size" in response){ // The response JSON contains size, high likelihood valid response returned ulong uploadFileSize = response["size"].integer; // In some cases the file that was uploaded was not complete, but 'completed' without errors on OneDrive // This has been seen with PNG / JPG files mainly, which then contributes to generating a 412 error when we attempt to update the metadata // Validate here that the file uploaded, at least in size, matches in the response to what the size is on disk if (thisFileSize != uploadFileSize){ if(disableUploadValidation){ // Print a warning message log.log("WARNING: Uploaded file size does not match local file - skipping upload validation"); } else { // OK .. the uploaded file does not match and we did not disable this validation log.log("Uploaded file size does not match local file - upload failure - retrying"); // Delete uploaded bad file onedrive.deleteById(response["parentReference"]["driveId"].str, response["id"].str, response["eTag"].str); // Re-upload uploadNewFile(path); return; } } // File validation is OK if ((accountType == "personal") || (thisFileSize == 0)){ // Update the item's metadata on OneDrive string id = response["id"].str; string cTag; // Is there a valid cTag in the response? if ("cTag" in response) { // use the cTag instead of the eTag because OneDrive may update the metadata of files AFTER they have been uploaded cTag = response["cTag"].str; } else { // Is there an eTag in the response? if ("eTag" in response) { // use the eTag from the response as there was no cTag cTag = response["eTag"].str; } else { // no tag available - set to nothing cTag = ""; } } if (exists(path)) { SysTime mtime = timeLastModified(path).toUTC(); uploadLastModifiedTime(parent.driveId, id, cTag, mtime); } else { // will be removed in different event! log.log("File disappeared after upload: ", path); } return; } else { // OneDrive Business Account - always use a session to upload // The session includes a Request Body element containing lastModifiedDateTime // which negates the need for a modify event against OneDrive // Is the response a valid JSON object - validation checking done in saveItem saveItem(response); return; } } } else { // response is not valid JSON, an error was returned from OneDrive log.fileOnly("Uploading new file ", path, " ... 
error"); uploadFailed = true; return; } } else { // we are --dry-run - simulate the file upload writeln("done."); response = createFakeResponse(path); // Log action to log file log.fileOnly("Uploading new file ", path, " ... done."); // Is the response a valid JSON object - validation checking done in saveItem saveItem(response); return; } } if (e.httpStatusCode >= 500) { // OneDrive returned a 'HTTP 5xx Server Side Error' - gracefully handling error - error message already logged uploadFailed = true; return; } } // Check that the filename that is returned is actually the file we wish to upload // https://docs.microsoft.com/en-us/windows/desktop/FileIO/naming-a-file // Do not assume case sensitivity. For example, consider the names OSCAR, Oscar, and oscar to be the same, // even though some file systems (such as a POSIX-compliant file system) may consider them as different. // Note that NTFS supports POSIX semantics for case sensitivity but this is not the default behavior. // fileDetailsFromOneDrive has to be a valid object if (fileDetailsFromOneDrive.type() == JSONType.object){ // Check that 'name' is in the JSON response (validates data) and that 'name' == the path we are looking for if (("name" in fileDetailsFromOneDrive) && (fileDetailsFromOneDrive["name"].str == baseName(path))) { // OneDrive 'name' matches local path name log.vlog("Requested file to upload exists on OneDrive - local database is out of sync for this file: ", path); // Is the local file newer than the uploaded file? SysTime localFileModifiedTime = timeLastModified(path).toUTC(); SysTime remoteFileModifiedTime = SysTime.fromISOExtString(fileDetailsFromOneDrive["fileSystemInfo"]["lastModifiedDateTime"].str); localFileModifiedTime.fracSecs = Duration.zero; if (localFileModifiedTime > remoteFileModifiedTime){ // local file is newer log.vlog("Requested file to upload is newer than existing file on OneDrive"); write("Uploading modified file ", path, " ... 
"); JSONValue response; if (!dryRun) { if (accountType == "personal"){ // OneDrive Personal account upload handling if (thisFileSize <= thresholdFileSize) { try { response = onedrive.simpleUpload(path, parent.driveId, parent.id, baseName(path)); writeln("done."); } catch (OneDriveException e) { if (e.httpStatusCode == 401) { // OneDrive returned a 'HTTP/1.1 401 Unauthorized Error' - file failed to be uploaded writeln("skipped."); log.vlog("OneDrive returned a 'HTTP 401 - Unauthorized' - gracefully handling error"); uploadFailed = true; return; } if (e.httpStatusCode == 504) { // HTTP request returned status code 504 (Gateway Timeout) // Try upload as a session try { response = session.upload(path, parent.driveId, parent.id, baseName(path)); writeln("done."); } catch (OneDriveException e) { // error uploading file // display what the error is writeln("skipped."); displayOneDriveErrorMessage(e.msg); uploadFailed = true; return; } } else { // display what the error is writeln("skipped."); displayOneDriveErrorMessage(e.msg); uploadFailed = true; return; } } catch (FileException e) { // display the error message writeln("skipped."); displayFileSystemErrorMessage(e.msg); uploadFailed = true; return; } } else { // File larger than threshold - use a session to upload writeln(""); try { response = session.upload(path, parent.driveId, parent.id, baseName(path)); writeln("done."); } catch (OneDriveException e) { if (e.httpStatusCode == 401) { // OneDrive returned a 'HTTP/1.1 401 Unauthorized Error' - file failed to be uploaded writeln("skipped."); log.vlog("OneDrive returned a 'HTTP 401 - Unauthorized' - gracefully handling error"); uploadFailed = true; return; } else { // display what the error is writeln("skipped."); displayOneDriveErrorMessage(e.msg); uploadFailed = true; return; } } catch (FileException e) { // display the error message writeln("skipped."); displayFileSystemErrorMessage(e.msg); uploadFailed = true; return; } } // response from OneDrive has to be a valid JSON object if (response.type() == JSONType.object){ // response is a valid JSON object string id = response["id"].str; string cTag; // Is there a valid cTag in the response? if ("cTag" in response) { // use the cTag instead of the eTag because Onedrive may update the metadata of files AFTER they have been uploaded cTag = response["cTag"].str; } else { // Is there an eTag in the response? if ("eTag" in response) { // use the eTag from the response as there was no cTag cTag = response["eTag"].str; } else { // no tag available - set to nothing cTag = ""; } } // validate if path exists so mtime can be calculated if (exists(path)) { SysTime mtime = timeLastModified(path).toUTC(); uploadLastModifiedTime(parent.driveId, id, cTag, mtime); } else { // will be removed in different event! 
log.log("File disappeared after upload: ", path); } } else { // Log that an invalid JSON object was returned log.vdebug("onedrive.simpleUpload or session.upload call returned an invalid JSON Object"); return; } } else { // OneDrive Business account modified file upload handling if (accountType == "business"){ // OneDrive Business Account - always use a session to upload writeln(""); try { response = session.upload(path, parent.driveId, parent.id, baseName(path), fileDetailsFromOneDrive["eTag"].str); } catch (OneDriveException e) { if (e.httpStatusCode == 401) { // OneDrive returned a 'HTTP/1.1 401 Unauthorized Error' - file failed to be uploaded writeln("skipped."); log.vlog("OneDrive returned a 'HTTP 401 - Unauthorized' - gracefully handling error"); uploadFailed = true; return; } else { // display what the error is writeln("skipped."); displayOneDriveErrorMessage(e.msg); uploadFailed = true; return; } } catch (FileException e) { // display the error message writeln("skipped."); displayFileSystemErrorMessage(e.msg); uploadFailed = true; return; } // upload complete writeln("done."); saveItem(response); } // OneDrive SharePoint account modified file upload handling if (accountType == "documentLibrary"){ // Depending on the file size, this will depend on how best to handle the modified local file // as if too large, the following error will be generated by OneDrive: // HTTP request returned status code 413 (Request Entity Too Large) // We also cant use a session to upload the file, we have to use simpleUploadReplace if (getSize(path) <= thresholdFileSize) { // Upload file via simpleUploadReplace as below threshold size try { response = onedrive.simpleUploadReplace(path, fileDetailsFromOneDrive["parentReference"]["driveId"].str, fileDetailsFromOneDrive["id"].str, fileDetailsFromOneDrive["eTag"].str); } catch (OneDriveException e) { if (e.httpStatusCode == 401) { // OneDrive returned a 'HTTP/1.1 401 Unauthorized Error' - file failed to be uploaded writeln("skipped."); log.vlog("OneDrive returned a 'HTTP 401 - Unauthorized' - gracefully handling error"); uploadFailed = true; return; } else { // display what the error is writeln("skipped."); displayOneDriveErrorMessage(e.msg); uploadFailed = true; return; } } catch (FileException e) { // display the error message writeln("skipped."); displayFileSystemErrorMessage(e.msg); uploadFailed = true; return; } } else { // Have to upload via a session, however we have to delete the file first otherwise this will generate a 404 error post session upload // Remove the existing file onedrive.deleteById(fileDetailsFromOneDrive["parentReference"]["driveId"].str, fileDetailsFromOneDrive["id"].str, fileDetailsFromOneDrive["eTag"].str); // Upload as a session, as a new file writeln(""); try { response = session.upload(path, parent.driveId, parent.id, baseName(path)); } catch (OneDriveException e) { if (e.httpStatusCode == 401) { // OneDrive returned a 'HTTP/1.1 401 Unauthorized Error' - file failed to be uploaded writeln("skipped."); log.vlog("OneDrive returned a 'HTTP 401 - Unauthorized' - gracefully handling error"); uploadFailed = true; return; } else { // display what the error is writeln("skipped."); displayOneDriveErrorMessage(e.msg); uploadFailed = true; return; } } catch (FileException e) { // display the error message writeln("skipped."); displayFileSystemErrorMessage(e.msg); uploadFailed = true; return; } } writeln(" done."); // Is the response a valid JSON object - validation checking done in saveItem saveItem(response); // Due to 
https://github.com/OneDrive/onedrive-api-docs/issues/935 Microsoft modifies all PDF, MS Office & HTML files with added XML content. It is a 'feature' of SharePoint. // So - now the 'local' and 'remote' file is technically DIFFERENT ... thanks Microsoft .. NO way to disable this stupidity if(!uploadOnly){ // Download the Microsoft 'modified' file so 'local' is now in sync log.vlog("Due to Microsoft Sharepoint 'enrichment' of files, downloading 'enriched' file to ensure local file is in-sync"); log.vlog("See: https://github.com/OneDrive/onedrive-api-docs/issues/935 for further details"); auto fileSize = response["size"].integer; onedrive.downloadById(response["parentReference"]["driveId"].str, response["id"].str, path, fileSize); } else { // we are not downloading a file, warn that file differences will exist log.vlog("WARNING: Due to Microsoft Sharepoint 'enrichment' of files, this file is now technically different to your local copy"); log.vlog("See: https://github.com/OneDrive/onedrive-api-docs/issues/935 for further details"); } } } } else { // we are --dry-run - simulate the file upload writeln("done."); response = createFakeResponse(path); // Log action to log file log.fileOnly("Uploading modified file ", path, " ... done."); // Is the response a valid JSON object - validation checking done in saveItem saveItem(response); return; } // Log action to log file log.fileOnly("Uploading modified file ", path, " ... done."); } else { // Save the details of the file that we got from OneDrive // --dry-run safe log.vlog("Updating the local database with details for this file: ", path); saveItem(fileDetailsFromOneDrive); } } else { // The files are the "same" name wise but different in case sensitivity log.error("ERROR: A local file has the same name as another local file."); log.error("ERROR: To resolve, rename this local file: ", absolutePath(path)); log.log("Skipping uploading this new file: ", absolutePath(path)); } } else { // fileDetailsFromOneDrive is not valid JSON, an error was returned from OneDrive log.error("ERROR: An error was returned from OneDrive and the resulting response is not a valid JSON object"); log.error("ERROR: Increase logging verbosity to assist determining why."); uploadFailed = true; return; } } else { // Skip file - too large log.log("Skipping uploading this new file as it exceeds the maximum size allowed by OneDrive: ", path); uploadFailed = true; return; } } } else { log.log("Skipping uploading this new file as parent path is not in the database: ", path); uploadFailed = true; return; } } // delete an item on OneDrive private void uploadDeleteItem(Item item, string path) { log.log("Deleting item from OneDrive: ", path); if (!dryRun) { // we are not in a --dry-run situation, process deletion to OneDrive if ((item.driveId == "") && (item.id == "") && (item.eTag == "")){ // These are empty ... we cannot delete if this is empty .... log.vdebug("item.driveId, item.id & item.eTag are empty ... need to query OneDrive for values"); log.vdebug("Checking OneDrive for path: ", path); JSONValue onedrivePathDetails = onedrive.getPathDetails(path); // Returns a JSON String for the OneDrive Path log.vdebug("OneDrive path details: ", onedrivePathDetails); item.driveId = onedrivePathDetails["parentReference"]["driveId"].str; // Should give something like 12345abcde1234a1 item.id = onedrivePathDetails["id"].str; // This item's ID. 
Should give something like 12345ABCDE1234A1!101 item.eTag = onedrivePathDetails["eTag"].str; // Should be something like aNjM2NjJFRUVGQjY2NjJFMSE5MzUuMA } try { onedrive.deleteById(item.driveId, item.id, item.eTag); } catch (OneDriveException e) { if (e.httpStatusCode == 404) { // item.id, item.eTag could not be found on driveId log.vlog("OneDrive reported: The resource could not be found."); } else { // Not a 404 response .. is this a 403 response due to OneDrive Business Retention Policy being enabled? if ((e.httpStatusCode == 403) && (accountType != "personal")) { auto errorArray = splitLines(e.msg); JSONValue errorMessage = parseJSON(replace(e.msg, errorArray[0], "")); if (errorMessage["error"]["message"].str == "Request was cancelled by event received. If attempting to delete a non-empty folder, it's possible that it's on hold") { // Issue #338 - Unable to delete OneDrive content when OneDrive Business Retention Policy is enabled // TODO: We have to recursively delete all files & folders from this path to delete // WARN: log.error("\nERROR: Unable to delete the requested remote path from OneDrive: ", path); log.error("ERROR: This error is due to OneDrive Business Retention Policy being applied"); log.error("WORKAROUND: Manually delete all files and folders from the above path as per Business Retention Policy\n"); } } else { // Not a 403 response & OneDrive Business Account / O365 Shared Folder / Library // display what the error is displayOneDriveErrorMessage(e.msg); return; } } } // delete the reference in the local database itemdb.deleteById(item.driveId, item.id); if (item.remoteId != null) { // If the item is a remote item, delete the reference in the local database itemdb.deleteById(item.remoteDriveId, item.remoteId); } } } // update the item's last modified time private void uploadLastModifiedTime(const(char)[] driveId, const(char)[] id, const(char)[] eTag, SysTime mtime) { JSONValue data = [ "fileSystemInfo": JSONValue([ "lastModifiedDateTime": mtime.toISOExtString() ]) ]; JSONValue response; try { response = onedrive.updateById(driveId, id, data, eTag); } catch (OneDriveException e) { if (e.httpStatusCode == 412) { // OneDrive threw a 412 error, most likely: ETag does not match current item's value // Retry without eTag log.vdebug("File Metadata Update Failed - OneDrive eTag / cTag match issue"); log.vlog("OneDrive returned a 'HTTP 412 - Precondition Failed' when attempting file time stamp update - gracefully handling error"); string nullTag = null; response = onedrive.updateById(driveId, id, data, nullTag); } } // save the updated response from OneDrive in the database // Is the response a valid JSON object - validation checking done in saveItem saveItem(response); } // save item details into database private void saveItem(JSONValue jsonItem) { // jsonItem has to be a valid object if (jsonItem.type() == JSONType.object){ // Check if the response JSON has an 'id', otherwise makeItem() fails with 'Key not found: id' if (hasId(jsonItem)) { // Takes a JSON input and formats to an item which can be used by the database Item item = makeItem(jsonItem); // Add to the local database log.vdebug("Adding to database: ", item); itemdb.upsert(item); } else { // log error log.error("ERROR: OneDrive response missing required 'id' element"); log.error("ERROR: ", jsonItem); } } else { // log error log.error("ERROR: An error was returned from OneDrive and the resulting response is not a valid JSON object"); log.error("ERROR: Increase logging verbosity to assist determining why."); } } // Parse 
and display error message received from OneDrive private void displayOneDriveErrorMessage(string message) { log.error("ERROR: OneDrive returned an error with the following message:"); auto errorArray = splitLines(message); log.error(" Error Message: ", errorArray[0]); // extract 'message' as the reason JSONValue errorMessage = parseJSON(replace(message, errorArray[0], "")); log.error(" Error Reason: ", errorMessage["error"]["message"].str); } // Parse and display error message received from the local file system private void displayFileSystemErrorMessage(string message) { log.error("ERROR: The local file system returned an error with the following message:"); auto errorArray = splitLines(message); log.error(" Error Message: ", errorArray[0]); } // https://docs.microsoft.com/en-us/onedrive/developer/rest-api/api/driveitem_move // This function is only called in monitor mode when an move event is coming from // inotify and we try to move the item. void uploadMoveItem(string from, string to) { log.log("Moving ", from, " to ", to); Item fromItem, toItem, parentItem; if (!itemdb.selectByPath(from, defaultDriveId, fromItem)) { uploadNewFile(to); return; } if (fromItem.parentId == null) { // the item is a remote folder, need to do the operation on the parent enforce(itemdb.selectByPathNoRemote(from, defaultDriveId, fromItem)); } if (itemdb.selectByPath(to, defaultDriveId, toItem)) { // the destination has been overwritten uploadDeleteItem(toItem, to); } if (!itemdb.selectByPath(dirName(to), defaultDriveId, parentItem)) { throw new SyncException("Can't move an item to an unsynced directory"); } if (fromItem.driveId != parentItem.driveId) { // items cannot be moved between drives uploadDeleteItem(fromItem, from); uploadNewFile(to); } else { if (!exists(to)) { log.vlog("uploadMoveItem target has disappeared: ", to); return; } SysTime mtime = timeLastModified(to).toUTC(); JSONValue diff = [ "name": JSONValue(baseName(to)), "parentReference": JSONValue([ "id": parentItem.id ]), "fileSystemInfo": JSONValue([ "lastModifiedDateTime": mtime.toISOExtString() ]) ]; // Perform the move operation on OneDrive JSONValue response; try { response = onedrive.updateById(fromItem.driveId, fromItem.id, diff, fromItem.eTag); } catch (OneDriveException e) { if (e.httpStatusCode == 412) { // OneDrive threw a 412 error, most likely: ETag does not match current item's value // Retry without eTag log.vdebug("File Move Failed - OneDrive eTag / cTag match issue"); log.vlog("OneDrive returned a 'HTTP 412 - Precondition Failed' when attempting to move the file - gracefully handling error"); string nullTag = null; // move the file but without the eTag response = onedrive.updateById(fromItem.driveId, fromItem.id, diff, nullTag); } } // save the move response from OneDrive in the database // Is the response a valid JSON object - validation checking done in saveItem saveItem(response); } } // delete an item by it's path void deleteByPath(string path) { Item item; if (!itemdb.selectByPath(path, defaultDriveId, item)) { throw new SyncException("The item to delete is not in the local database"); } if (item.parentId == null) { // the item is a remote folder, need to do the operation on the parent enforce(itemdb.selectByPathNoRemote(path, defaultDriveId, item)); } try { if (noRemoteDelete) { // do not process remote delete log.vlog("Skipping remote delete as --upload-only & --no-remote-delete configured"); } else { uploadDeleteItem(item, path); } } catch (OneDriveException e) { if (e.httpStatusCode == 404) { log.log(e.msg); } else { // 
display what the error is displayOneDriveErrorMessage(e.msg); } } } // move a OneDrive folder from one name to another void moveByPath(const(string) source, const(string) destination) { log.vlog("Moving remote folder: ", source, " -> ", destination); // Source and Destination are relative to ~/OneDrive string sourcePath = source; string destinationBasePath = dirName(destination).idup; // if destinationBasePath == '.' then destinationBasePath needs to be "" if (destinationBasePath == ".") { destinationBasePath = ""; } string newFolderName = baseName(destination).idup; string destinationPathString = "/drive/root:/" ~ destinationBasePath; // Build up the JSON changes JSONValue moveData = ["name": newFolderName]; JSONValue destinationPath = ["path": destinationPathString]; moveData["parentReference"] = destinationPath; // Make the change on OneDrive auto res = onedrive.moveByPath(sourcePath, moveData); } // Query Office 365 SharePoint Shared Library site to obtain it's Drive ID void querySiteCollectionForDriveID(string o365SharedLibraryName){ // Steps to get the ID: // 1. Query https://graph.microsoft.com/v1.0/sites?search= with the name entered // 2. Evaluate the response. A valid response will contain the description and the id. If the response comes back with nothing, the site name cannot be found or no access // 3. If valid, use the returned ID and query the site drives // https://graph.microsoft.com/v1.0/sites//drives // 4. Display Shared Library Name & Drive ID string site_id; string drive_id; string webUrl; bool found = false; JSONValue siteQuery; log.log("Office 365 Library Name Query: ", o365SharedLibraryName); try { siteQuery = onedrive.o365SiteSearch(encodeComponent(o365SharedLibraryName)); } catch (OneDriveException e) { log.error("ERROR: Query of OneDrive for Office 365 Library Name failed"); if (e.httpStatusCode == 403) { // Forbidden - most likely authentication scope needs to be updated log.error("ERROR: Authentication scope needs to be updated. Use --logout and re-authenticate client."); return; } else { // display what the error is displayOneDriveErrorMessage(e.msg); return; } } // is siteQuery a valid JSON object & contain data we can use? if ((siteQuery.type() == JSONType.object) && ("value" in siteQuery)) { // valid JSON object foreach (searchResult; siteQuery["value"].array) { // Need an 'exclusive' match here with o365SharedLibraryName as entered log.vdebug("Found O365 Site: ", searchResult); if (o365SharedLibraryName == searchResult["displayName"].str){ // 'displayName' matches search request site_id = searchResult["id"].str; webUrl = searchResult["webUrl"].str; JSONValue siteDriveQuery; try { siteDriveQuery = onedrive.o365SiteDrives(site_id); } catch (OneDriveException e) { log.error("ERROR: Query of OneDrive for Office Site ID failed"); // display what the error is displayOneDriveErrorMessage(e.msg); return; } // is siteDriveQuery a valid JSON object & contain data we can use? if ((siteDriveQuery.type() == JSONType.object) && ("value" in siteDriveQuery)) { // valid JSON object foreach (driveResult; siteDriveQuery["value"].array) { // Display results found = true; writeln("SiteName: ", searchResult["displayName"].str); writeln("drive_id: ", driveResult["id"].str); writeln("URL: ", webUrl); } } else { // not a valid JSON object log.error("ERROR: There was an error performing this operation on OneDrive"); log.error("ERROR: Increase logging verbosity to assist determining why."); return; } } } if(!found) { log.error("ERROR: This site could not be found. 
Please check it's name and your permissions to access the site."); } } else { // not a valid JSON object log.error("ERROR: There was an error performing this operation on OneDrive"); log.error("ERROR: Increase logging verbosity to assist determining why."); return; } } // Query OneDrive for a URL path of a file void queryOneDriveForFileURL(string localFilePath, string syncDir) { // Query if file is valid locally if (exists(localFilePath)) { // File exists locally, does it exist in the database // Path needs to be relative to sync_dir path string relativePath = relativePath(localFilePath, syncDir); Item item; if (itemdb.selectByPath(relativePath, defaultDriveId, item)) { // File is in the local database cache JSONValue fileDetails; try { fileDetails = onedrive.getFileDetails(item.driveId, item.id); } catch (OneDriveException e) { // display what the error is displayOneDriveErrorMessage(e.msg); return; } if ((fileDetails.type() == JSONType.object) && ("webUrl" in fileDetails)) { // Valid JSON object writeln(fileDetails["webUrl"].str); } } else { // File has not been synced with OneDrive log.error("File has not been synced with OneDrive: ", localFilePath); } } else { // File does not exist locally log.error("File not found on local system: ", localFilePath); } } // Query the OneDrive 'drive' to determine if we are 'in sync' or if there are pending changes void queryDriveForChanges(string path) { // Function variables int validChanges = 0; long downloadSize = 0; string driveId; string folderId; string deltaLink; string thisItemId; string thisItemPath; string syncFolderName; string syncFolderPath; string syncFolderChildPath; JSONValue changes; JSONValue onedrivePathDetails; // Get the path details from OneDrive try { onedrivePathDetails = onedrive.getPathDetails(path); // Returns a JSON String for the OneDrive Path } catch (OneDriveException e) { if (e.httpStatusCode == 404) { // Requested path could not be found log.error("ERROR: The requested path to query was not found on OneDrive"); return; } } if(isItemRemote(onedrivePathDetails)){ // remote changes driveId = onedrivePathDetails["remoteItem"]["parentReference"]["driveId"].str; // Should give something like 66d53be8a5056eca folderId = onedrivePathDetails["remoteItem"]["id"].str; // Should give something like BC7D88EC1F539DCF!107 syncFolderName = onedrivePathDetails["name"].str; // A remote drive item will not have ["parentReference"]["path"] syncFolderPath = ""; syncFolderChildPath = ""; } else { driveId = defaultDriveId; folderId = onedrivePathDetails["id"].str; // Should give something like 12345ABCDE1234A1!101 syncFolderName = onedrivePathDetails["name"].str; if (hasParentReferencePath(onedrivePathDetails)) { syncFolderPath = onedrivePathDetails["parentReference"]["path"].str; syncFolderChildPath = syncFolderPath ~ "/" ~ syncFolderName ~ "/"; } else { // root drive item will not have ["parentReference"]["path"] syncFolderPath = ""; syncFolderChildPath = ""; } } // Query Database for the deltaLink deltaLink = itemdb.getDeltaLink(driveId, folderId); const(char)[] idToQuery; if (driveId == defaultDriveId) { // The drive id matches our users default drive id idToQuery = defaultRootId.dup; } else { // The drive id does not match our users default drive id // Potentially the 'path id' we are requesting the details of is a Shared Folder (remote item) // Use folderId idToQuery = folderId; } // Query OneDrive changes changes = onedrive.viewChangesById(driveId, idToQuery, deltaLink); // Are there any changes on OneDrive? 
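// The delta response from viewChangesById() is evaluated below: an empty "value" array is
// treated as "no pending remote changes"; otherwise, when a specific folder was requested,
// each returned item is matched against that folder before being counted towards the
// approximate download size.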
if (count(changes["value"].array) != 0) { // Were we given a remote path to check if we are in sync for, or the root? if (path != "/") { // we were given a directory to check, we need to validate the list of changes against this path only foreach (item; changes["value"].array) { // Is this change valid for the 'path' we are checking? if (hasParentReferencePath(item)) { thisItemId = item["parentReference"]["id"].str; thisItemPath = item["parentReference"]["path"].str; } else { thisItemId = item["id"].str; // Is the defaultDriveId == driveId if (driveId == defaultDriveId){ // 'root' items will not have ["parentReference"]["path"] if (isItemRoot(item)){ thisItemPath = ""; } else { thisItemPath = item["parentReference"]["path"].str; } } else { // A remote drive item will not have ["parentReference"]["path"] thisItemPath = ""; } } if ( (thisItemId == folderId) || (canFind(thisItemPath, syncFolderChildPath)) || (canFind(thisItemPath, folderId)) ){ // This is a change we want count validChanges++; if ((isItemFile(item)) && (hasFileSize(item))) { downloadSize = downloadSize + item["size"].integer; } } } // Are there any valid changes? if (validChanges != 0){ writeln("Selected directory is out of sync with OneDrive"); if (downloadSize > 0){ downloadSize = downloadSize / 1000; writeln("Approximate data to transfer: ", downloadSize, " KB"); } } else { writeln("No pending remote changes - selected directory is in sync"); } } else { writeln("Local directory is out of sync with OneDrive"); foreach (item; changes["value"].array) { if ((isItemFile(item)) && (hasFileSize(item))) { downloadSize = downloadSize + item["size"].integer; } } if (downloadSize > 0){ downloadSize = downloadSize / 1000; writeln("Approximate data to transfer: ", downloadSize, " KB"); } } } else { writeln("No pending remote changes - in sync"); } } // Create a fake OneDrive response suitable for use with saveItem JSONValue createFakeResponse(string path) { import std.digest.sha; // Generate a simulated JSON response which can be used // At a minimum we need: // 1. eTag // 2. cTag // 3. fileSystemInfo // 4. file or folder. if file, hash of file // 5. id // 6. name // 7. 
parent reference SysTime mtime = timeLastModified(path).toUTC(); // real id / eTag / cTag are different format for personal / business account auto sha1 = new SHA1Digest(); ubyte[] hash1 = sha1.digest(path); JSONValue fakeResponse; if (isDir(path)) { // path is a directory fakeResponse = [ "id": JSONValue(toHexString(hash1)), "cTag": JSONValue(toHexString(hash1)), "eTag": JSONValue(toHexString(hash1)), "fileSystemInfo": JSONValue([ "createdDateTime": mtime.toISOExtString(), "lastModifiedDateTime": mtime.toISOExtString() ]), "name": JSONValue(baseName(path)), "parentReference": JSONValue([ "driveId": JSONValue(defaultDriveId), "driveType": JSONValue(accountType), "id": JSONValue(defaultRootId) ]), "folder": JSONValue("") ]; } else { // path is a file // compute file hash - both business and personal responses use quickXorHash string quickXorHash = computeQuickXorHash(path); fakeResponse = [ "id": JSONValue(toHexString(hash1)), "cTag": JSONValue(toHexString(hash1)), "eTag": JSONValue(toHexString(hash1)), "fileSystemInfo": JSONValue([ "createdDateTime": mtime.toISOExtString(), "lastModifiedDateTime": mtime.toISOExtString() ]), "name": JSONValue(baseName(path)), "parentReference": JSONValue([ "driveId": JSONValue(defaultDriveId), "driveType": JSONValue(accountType), "id": JSONValue(defaultRootId) ]), "file": JSONValue([ "hashes":JSONValue([ "quickXorHash": JSONValue(quickXorHash) ]) ]) ]; } log.vdebug("Generated Fake OneDrive Response: ", fakeResponse); return fakeResponse; } } onedrive-2.3.13/src/upload.d000066400000000000000000000210071360252424000156310ustar00rootroot00000000000000import std.algorithm, std.conv, std.datetime, std.file, std.json; import std.stdio, core.thread, std.string; import progress, onedrive, util; static import log; private long fragmentSize = 10 * 2^^20; // 10 MiB struct UploadSession { private OneDriveApi onedrive; private bool verbose; // https://dev.onedrive.com/resources/uploadSession.htm private JSONValue session; // path where to save the session private string sessionFilePath; this(OneDriveApi onedrive, string sessionFilePath) { assert(onedrive); this.onedrive = onedrive; this.sessionFilePath = sessionFilePath; this.verbose = verbose; } JSONValue upload(string localPath, const(char)[] parentDriveId, const(char)[] parentId, const(char)[] filename, const(char)[] eTag = null) { // Fix https://github.com/abraunegg/onedrive/issues/2 // More Details https://github.com/OneDrive/onedrive-api-docs/issues/778 SysTime localFileLastModifiedTime = timeLastModified(localPath).toUTC(); localFileLastModifiedTime.fracSecs = Duration.zero; JSONValue fileSystemInfo = [ "item": JSONValue([ "@name.conflictBehavior": JSONValue("replace"), "fileSystemInfo": JSONValue([ "lastModifiedDateTime": localFileLastModifiedTime.toISOExtString() ]) ]) ]; // Try to create the upload session for this file session = onedrive.createUploadSession(parentDriveId, parentId, filename, eTag, fileSystemInfo); if ("uploadUrl" in session){ session["localPath"] = localPath; save(); return upload(); } else { // there was an error log.vlog("Create file upload session failed ... skipping file upload"); // return upload() will return a JSONValue response, create an empty JSONValue response to return JSONValue response; return response; } } /* Restore the previous upload session. * Returns true if the session is valid. Call upload() to resume it. * Returns false if there is no session or the session is expired. 
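 * The saved session is additionally re-validated against OneDrive via requestUploadStatus(),
 * and resuming is skipped if the local file has disappeared or can no longer be read.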
*/ bool restore() { if (exists(sessionFilePath)) { log.vlog("Trying to restore the upload session ..."); // We cant use JSONType.object check, as this is currently a string // We cant use a try & catch block, as it does not catch std.json.JSONException auto sessionFileText = readText(sessionFilePath); if(canFind(sessionFileText,"@odata.context")) { session = readText(sessionFilePath).parseJSON(); } else { log.vlog("Upload session resume data is invalid"); remove(sessionFilePath); return false; } // Check the session resume file for expirationDateTime if ("expirationDateTime" in session){ // expirationDateTime in the file auto expiration = SysTime.fromISOExtString(session["expirationDateTime"].str); if (expiration < Clock.currTime()) { log.vlog("The upload session is expired"); return false; } if (!exists(session["localPath"].str)) { log.vlog("The file does not exist anymore"); return false; } // Can we read the file - as a permissions issue or file corruption will cause a failure on resume // https://github.com/abraunegg/onedrive/issues/113 if (readLocalFile(session["localPath"].str)){ // able to read the file // request the session status JSONValue response; try { response = onedrive.requestUploadStatus(session["uploadUrl"].str); } catch (OneDriveException e) { // handle any onedrive error response if (e.httpStatusCode == 400) { log.vlog("Upload session not found"); return false; } } // do we have a valid response from OneDrive? if (response.type() == JSONType.object){ // JSON object if (("expirationDateTime" in response) && ("nextExpectedRanges" in response)){ // has the elements we need session["expirationDateTime"] = response["expirationDateTime"]; session["nextExpectedRanges"] = response["nextExpectedRanges"]; if (session["nextExpectedRanges"].array.length == 0) { log.vlog("The upload session is completed"); return false; } } else { // bad data log.vlog("Restore file upload session failed - invalid data response from OneDrive"); if (exists(sessionFilePath)) { remove(sessionFilePath); } return false; } } else { // not a JSON object log.vlog("Restore file upload session failed - invalid response from OneDrive"); if (exists(sessionFilePath)) { remove(sessionFilePath); } return false; } return true; } else { // unable to read the local file log.vlog("Restore file upload session failed - unable to read the local file"); if (exists(sessionFilePath)) { remove(sessionFilePath); } return false; } } else { // session file contains an error - cant resume log.vlog("Restore file upload session failed - cleaning up session resume"); if (exists(sessionFilePath)) { remove(sessionFilePath); } return false; } } return false; } JSONValue upload() { // Response for upload JSONValue response; // session JSON needs to contain valid elements long offset; long fileSize; if ("nextExpectedRanges" in session){ offset = session["nextExpectedRanges"][0].str.splitter('-').front.to!long; } if ("localPath" in session){ fileSize = getSize(session["localPath"].str); } if ("uploadUrl" in session){ // Upload file via session created // Upload Progress Bar size_t iteration = (roundTo!int(double(fileSize)/double(fragmentSize)))+1; Progress p = new Progress(iteration); p.title = "Uploading"; long fragmentCount = 0; while (true) { fragmentCount++; log.vdebugUpload("Fragment: ", fragmentCount, " of ", iteration); p.next(); long fragSize = fragmentSize < fileSize - offset ? 
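// the last fragment is normally smaller than fragmentSize, so cap it at the bytes remaining: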
fragmentSize : fileSize - offset; // If the resume upload fails, we need to check for a return code here try { response = onedrive.uploadFragment( session["uploadUrl"].str, session["localPath"].str, offset, fragSize, fileSize ); } catch (OneDriveException e) { // there was an error response from OneDrive when uploading the file fragment // insert a new line as well, so that the below error is inserted on the console in the right location log.vlog("\nFragment upload failed - received an exception response from OneDrive"); // display what the error is displayOneDriveErrorMessage(e.msg); // retry fragment upload in case error is transient log.vlog("Retrying fragment upload"); try { response = onedrive.uploadFragment( session["uploadUrl"].str, session["localPath"].str, offset, fragSize, fileSize ); } catch (OneDriveException e) { // OneDrive threw another error on retry log.vlog("Retry to upload fragment failed"); // display what the error is displayOneDriveErrorMessage(e.msg); // set response to null as the fragment upload was in error twice response = null; } } // was the fragment uploaded without issue? if (response.type() == JSONType.object){ offset += fragmentSize; if (offset >= fileSize) break; // update the session details session["expirationDateTime"] = response["expirationDateTime"]; session["nextExpectedRanges"] = response["nextExpectedRanges"]; save(); } else { // not a JSON object - fragment upload failed log.vlog("File upload session failed - invalid response from OneDrive"); if (exists(sessionFilePath)) { remove(sessionFilePath); } // set response to null as error response = null; return response; } } // upload complete p.next(); writeln(); if (exists(sessionFilePath)) { remove(sessionFilePath); } return response; } else { // session elements were not present log.vlog("Session has no valid upload URL ... 
skipping this file upload"); // return an empty JSON response response = null; return response; } } // Parse and display error message received from OneDrive private void displayOneDriveErrorMessage(string message) { log.error("ERROR: OneDrive returned an error with the following message:"); auto errorArray = splitLines(message); log.error(" Error Message: ", errorArray[0]); // extract 'message' as the reason JSONValue errorMessage = parseJSON(replace(message, errorArray[0], "")); log.error(" Error Reason: ", errorMessage["error"]["message"].str); } private void save() { std.file.write(sessionFilePath, session.toString()); } } onedrive-2.3.13/src/util.d000066400000000000000000000171601360252424000153270ustar00rootroot00000000000000import std.base64; import std.conv; import std.digest.crc, std.digest.sha; import std.net.curl; import std.datetime; import std.file; import std.path; import std.regex; import std.socket; import std.stdio; import std.string; import std.algorithm; import std.uri; import qxor; static import log; shared string deviceName; static this() { deviceName = Socket.hostName; } // gives a new name to the specified file or directory void safeRename(const(char)[] path) { auto ext = extension(path); auto newPath = path.chomp(ext) ~ "-" ~ deviceName; if (exists(newPath ~ ext)) { int n = 2; char[] newPath2; do { newPath2 = newPath ~ "-" ~ n.to!string; n++; } while (exists(newPath2 ~ ext)); newPath = newPath2; } newPath ~= ext; rename(path, newPath); } // deletes the specified file without throwing an exception if it does not exists void safeRemove(const(char)[] path) { if (exists(path)) remove(path); } // returns the crc32 hex string of a file string computeCrc32(string path) { CRC32 crc; auto file = File(path, "rb"); foreach (ubyte[] data; chunks(file, 4096)) { crc.put(data); } return crc.finish().toHexString().dup; } // returns the sha1 hash hex string of a file string computeSha1Hash(string path) { SHA1 sha; auto file = File(path, "rb"); foreach (ubyte[] data; chunks(file, 4096)) { sha.put(data); } return sha.finish().toHexString().dup; } // returns the quickXorHash base64 string of a file string computeQuickXorHash(string path) { QuickXor qxor; auto file = File(path, "rb"); foreach (ubyte[] data; chunks(file, 4096)) { qxor.put(data); } return Base64.encode(qxor.finish()); } // converts wildcards (*, ?) 
to regex Regex!char wild2regex(const(char)[] pattern) { string str; str.reserve(pattern.length + 2); str ~= "^"; foreach (c; pattern) { switch (c) { case '*': str ~= "[^/]*"; break; case '.': str ~= "\\."; break; case '?': str ~= "[^/]"; break; case '|': str ~= "$|^"; break; case '+': str ~= "\\+"; break; case ' ': str ~= "\\s+"; break; case '/': str ~= "\\/"; break; default: str ~= c; break; } } str ~= "$"; return regex(str, "i"); } // returns true if the network connection is available bool testNetwork() { try { HTTP http = HTTP("https://login.microsoftonline.com"); http.dnsTimeout = (dur!"seconds"(5)); http.method = HTTP.Method.head; http.perform(); return true; } catch (SocketException) { return false; } } // Can we read the file - as a permissions issue or file corruption will cause a failure // https://github.com/abraunegg/onedrive/issues/113 // returns true if file can be accessed bool readLocalFile(string path) { try { // attempt to read up to the first 1 byte of the file // validates we can 'read' the file based on file permissions read(path,1); } catch (std.file.FileException e) { // unable to read the new local file log.log("Skipping uploading this file as it cannot be read (file permissions or file corruption): ", path); return false; } return true; } // calls globMatch for each string in pattern separated by '|' bool multiGlobMatch(const(char)[] path, const(char)[] pattern) { foreach (glob; pattern.split('|')) { if (globMatch!(std.path.CaseSensitive.yes)(path, glob)) { return true; } } return false; } bool isValidName(string path) { // Restriction and limitations about windows naming files // https://msdn.microsoft.com/en-us/library/aa365247 // https://support.microsoft.com/en-us/help/3125202/restrictions-and-limitations-when-you-sync-files-and-folders // allow root item if (path == ".") { return true; } bool matched = true; string itemName = baseName(path); auto invalidNameReg = ctRegex!( // Leading whitespace and trailing whitespace/dot `^\s.*|^.*[\s\.]$|` ~ // Invalid characters `.*[<>:"\|\?*/\\].*|` ~ // Reserved device name and trailing .~ `(?:^CON|^PRN|^AUX|^NUL|^COM[0-9]|^LPT[0-9])(?:[.].+)?$` ); auto m = match(itemName, invalidNameReg); matched = m.empty; // Additional explicit validation checks if (itemName == "Icon") {matched = false;} if (itemName == ".lock") {matched = false;} if (itemName == "desktop.ini") {matched = false;} // _vti_ cannot appear anywhere in a file or folder name if(canFind(itemName, "_vti_")){matched = false;} // Item name cannot equal '~' if (itemName == "~") {matched = false;} // return response return matched; } bool containsBadWhiteSpace(string path) { // allow root item if (path == ".") { return true; } // https://github.com/abraunegg/onedrive/issues/35 // Issue #35 presented an interesting issue where the filename contained a newline item // 'State-of-the-art, challenges, and open issues in the integration of Internet of'$'\n''Things and Cloud Computing.pdf' // When the check to see if this file was present the GET request queries as follows: // /v1.0/me/drive/root:/.%2FState-of-the-art%2C%20challenges%2C%20and%20open%20issues%20in%20the%20integration%20of%20Internet%20of%0AThings%20and%20Cloud%20Computing.pdf // The '$'\n'' is translated to %0A which causes the OneDrive query to fail // Check for the presence of '%0A' via regex string itemName = encodeComponent(baseName(path)); auto invalidWhitespaceReg = ctRegex!( // Check for \n which is %0A when encoded `%0A` ); auto m = match(itemName, invalidWhitespaceReg); return m.empty; } bool 
containsASCIIHTMLCodes(string path) { // https://github.com/abraunegg/onedrive/issues/151 // If a filename contains ASCII HTML codes, regardless of if it gets encoded, it generates an error // Check if the filename contains an ASCII HTML code sequence auto invalidASCIICode = ctRegex!( // Check to see if &#XXXX is in the filename `(?:&#|&#[0-9][0-9]|&#[0-9][0-9][0-9]|&#[0-9][0-9][0-9][0-9])` ); auto m = match(path, invalidASCIICode); return m.empty; } unittest { assert(multiGlobMatch(".hidden", ".*")); assert(multiGlobMatch(".hidden", "file|.*")); assert(!multiGlobMatch("foo.bar", "foo|bar")); // that should detect invalid file/directory name. assert(isValidName(".")); assert(isValidName("./general.file")); assert(!isValidName("./ leading_white_space")); assert(!isValidName("./trailing_white_space ")); assert(!isValidName("./trailing_dot.")); assert(!isValidName("./includesin the path")); assert(!isValidName("./includes:in the path")); assert(!isValidName(`./includes"in the path`)); assert(!isValidName("./includes|in the path")); assert(!isValidName("./includes?in the path")); assert(!isValidName("./includes*in the path")); assert(!isValidName("./includes / in the path")); assert(!isValidName(`./includes\ in the path`)); assert(!isValidName(`./includes\\ in the path`)); assert(!isValidName(`./includes\\\\ in the path`)); assert(!isValidName("./includes\\ in the path")); assert(!isValidName("./includes\\\\ in the path")); assert(!isValidName("./CON")); assert(!isValidName("./CON.text")); assert(!isValidName("./PRN")); assert(!isValidName("./AUX")); assert(!isValidName("./NUL")); assert(!isValidName("./COM0")); assert(!isValidName("./COM1")); assert(!isValidName("./COM2")); assert(!isValidName("./COM3")); assert(!isValidName("./COM4")); assert(!isValidName("./COM5")); assert(!isValidName("./COM6")); assert(!isValidName("./COM7")); assert(!isValidName("./COM8")); assert(!isValidName("./COM9")); assert(!isValidName("./LPT0")); assert(!isValidName("./LPT1")); assert(!isValidName("./LPT2")); assert(!isValidName("./LPT3")); assert(!isValidName("./LPT4")); assert(!isValidName("./LPT5")); assert(!isValidName("./LPT6")); assert(!isValidName("./LPT7")); assert(!isValidName("./LPT8")); assert(!isValidName("./LPT9")); } onedrive-2.3.13/tests/000077500000000000000000000000001360252424000145535ustar00rootroot00000000000000onedrive-2.3.13/tests/bad-file-name.tar.xz000066400000000000000000017202501360252424000203130ustar00rootroot000000000000007zXZִF!t/B](KHg-}~EI>CJ!"8*uBP`L¤Wlkmzm]-"k-{eRwZyQ68Ҁ.|:(]pAB׭]·凂 taVD҂6A|-$’`'smvWځQWp΁{ 2,˦S`,| ]`ssSδZ$iF~ݗ"PE#]GrTlKv}-QJZVqV7 gלŢ Vji 5x}H}Q '㼰3ā,R%{ΐ8e%3OcŶƼfldXwt~䊘(>X6ЉV70c\Q;3P6mK|{y#uP"f@3y?F€ITHw֗P.q/f4Wp|SP6O#X'h(4fG9g!L=a` PFcŒp#cD5xy J'݉DoW86J# ]TmgK8唱hƻW޹8VBRqۓD) lCm\ؽ Tt 'PÎٵmj%H .* tG[Aq}'sbEp7*^4T&6͡cwӿ4V%Ų1#WT~"59O^a^:5{6W.@>l/['Ȣ{]2-\uHcMO#/]FᄊPcΧj9mUm;[rZ px'/ 4#@@N?'ž[SG/z犀b^$s拓BwGHyLLY ͍9Moa֯/F-)ZѬɜ8Y_2m0Lk=H(i䞩a'Fc4=vԗcyޭE PTc 5r {YJo~O$q^&./*G|<A.ɸGfĐ`} z3(T,WB=G M{źhyGulto׶?>mkXZ1YpH8~bsUH\acge ۊM: OF!E(8!/ w"GuM"Qnoߩr=}AB;;ZMgiY\+7,[f{ $g((U͍}J"K (:+aS=18+e}+1݁;0lU d[WS`m =Rp|_9v4h(U9$[w'7}bBC%;9A _ ˩|.oȲtvAokݟ:! 
^#pG\6ې9_ IWr !Ib_@/h+:9?G8"0;S`Y8Cv4$( Zz&M;hqyumh8.i?Wl߀8)W"Q bKF^*KmEZ}`(9E^cWk NY lv+))㣩?j,ao)6ׅB) oJ(qZzb="NF04H0 'nVRHTR UI>3ŤvUfj5ktR%wc+Z^:1, 0{ k6tw18QJ󕀖Ɲ5`e UD>IZeBvOiey:L(i^Ӓ̌[מJ\7l$eV{l 7*u Lfu(B+J@W)L̪^H Tg$.=Wid>DN`b'7Q42BrHËlXsO*+Uik5 zbVel\I}pDVuC#i0 WftNDy 79E£שWM[{)QDw5 k0s}w8WBqзo wTa?'/$~}!K@ q-ŀ[b}}efqM#w y`) UpD|YS ERZuxh dBv~,VqtXNxUr4"BXD;auu*z&nF*1) r 9>X u8+Ab9 S+Mh6-*.-ƕb_0F|\rfjBMȒCKSCD\O]ʫTYV!?kPU|f'B$Jʌfm5<!>r9%G/[3j=Pbߔ #{/NRBx[S=F3S9EbyQ_qEI80q!SJC{hz,4^k8wy$gbA3etGW` s"W4Lȍz g 1'6]"ʩ?-khH{9}"¿'EN9>_0B'!{GҨSU7 77Fqtx (;*i ?q I&.Ll0H-9uKQ CŖ.nh)WOjjԳw[iio YGg^ "?;%? N@YX1UeP0${\X+Kk<\=W^}N_*ҊcU"1sg)uے #Db"3O ]xf;q#HAvV}!}?`Fm[=t>JUVMR˳VsK,=踥=̎˨dQdUm1dnBWne[9uYez:"]nrU VldN҇UÐVAbO 尾p`GBHzCjO3Zw\ah73R?Yn[!@o.3k A!03toӮ9[Z/1>>9+!Yk% c:(񥽜c:˅BoDyIξ E#FU123/}U;Gq/3@S!} 88>+D`Z$0~38g86hT|oJJ/9^މ^g Ŝu *k,\1J$eYPU`G{h\L۔1A؏ ѫߑqֵRp(aDcdRj:4$/g>pSC4ώ,Dݚrm=Lownp| HڣFUy!O,\o7([ȫjW0&>;p/rh̸!ɎP'D{e&|m7nB<ْO@Z.ELXח7$G|TY"v 5xt_{B`5J,8+k`+>BcNTwTy:W1Nvˌ1}ho$;mu ݵa1C4s4P~m& N.:(jxkr$O /Bއ}ou$B@&jw&k-C~v$ >wssOF*8NoS׸iX'_o:ڂa/ʦIDw(V֒UVO^3֦Q>r"m3L b+~q8lkZ핉wJf7'G=czGIAp'W!ۭ [fhwg+߽ )V!70EEBnV{y%:r#2a9Mn0vꐡ(˄mZ#lau3oK{n.ueb8Ixy: '|sqcS˯ m>9/Ƞ5tǐυ*1̓#k!=C5%Hep$I9"3I]hVԥs1Y5~ ABߦAz=e}7UtntrĺIM(U(|"ʟGp#px7~!Ҳ״;nqȭ6\*"tvڴ780=v"Rn4 0[=oï&o |>Om;8|1 b:F+Msv{|Y8YOS~hn)ű'|jQ?b=M 56A=_a.lM -SaED  @ T8޲d`a0[kqiQK )Pg=zK 3Xr^͝/!M蒇j0f$r9No 2MOu+<f^;AbF ,d'V+ۢtv󍨶!¨3iP Yߢ=i%[k՚ yLhl芷hRin'g#\>@SgV 6*;iVZի}-[zbg8U}+ W?!*[TcI1+$V<тG-&k2zx\co4)=D|?$+J|(cbxs{ M>yc8^p7w #ޓ6v9i'~Ab[b=:Lh4ޖ#ͷ3z|t`A[aE(y1r8S njT\& Wi2zz߶=Caؤ`ZGlJ?ꚲu\Zv=հ- Valbie#SE-󌙘Vl>!2Rh.\9klR"z ɁYvx:bQ%Nf{viD`,9Ԏ{ e >RoO%_a2{e< L݀prG;t؈Y>D20,⽧Nv;R(3b ͢bɈK$*L؈=R8/ݯ4bXԭXIG4o[8CqO.50U&}<}cݴL-duMxE;tiW<Wh>CV1)" }+B&ᦏMzÑN~<Ŀ+7WE5Nѩ_ŷ:{bod_k\] UU[スz LroƞT?jjZs >UJV}206!=seE'Sd.#xjj,8=!JA'ȩ/ڎ^b1_@{LtsB3=y88k RԥriL lH⨢{u/\+^搁bưl x ?t:ǔ#k ty9C`+i)qJXPy6LY'k)zոZe|yopGĸ*rY@:ӻI#{$ȋwl|Mc2('b!bu)83I( w8Ꚇ blAZkGG -=Q|rH~6w&] ϣbi7&3 q1b_NAЌ`2p)ƑBo{s Sht$Aj8ʏYƸ1fKj^eUO# /+hBGWY{/RE83mJ=x;Re/X]Lܘ+썸C!|4 @$U'Z?!l} Ds:}dD}Й$P{ҿ0ZJvaG]Vv%VO>+J2Ia3h2N ^X?@`(Iptڴ|k.r}+tVm4Cvxx25nhXQӡio *byC/k|JGeFZ/\Q& O/aޠX㷐fAGQ V[#oaRC C"8\v z -:cMR:n'-L25/ChúKԨ4,̘zD ʔ~_n,a2se۶:JѦ j `*L7?*%@L{%sMd*.݁7IL6S};&< %&eyDAHBF 8)WhעOs2irmW4~8EF=纓u;d9U8ݤ#lH_LSm4 hΏh?`Ӗe/悱~c(:􀺯=҂[ZqWK6LRws"KZSu +P7l6]x+xeKC<Ǽ}cg^4~{ ᐁFI-Z2^bI!# %}^ký9exs|Nz3RFΪeA&z3,Ŏ7(v%̓oS&>|&Q[W&Aޔ7'q/6k`\x0elM?|^AZOQ;OU&=z0|t]T:MjP=,"nwSVVt'K01I(]m+7 {n&ɬ,wen a.olOp^«6X~ ņw0j .1#0.*ϠձFSnh;|"槾>=9GcƀO}sS ,6 5*W .B>Fuj, o#S(FH&-,J*3mmy}eR3 38o/rOT%{I k5-<ƷDKwSwKTouN8jI-y^0x=&QmYbI*vg c`&bc@aӹ̚} ʟ>dz 5 ouW2O-HϷF7J 6t|34}JǣþNj-xCF Q-E++{UfZb6M1Vi`Z q AM Z~;(\0 å:$kV,-.8 @׏P(&Q w 1r,BNVA5wJ*vnrօQYalR WE|G Q:St?ђ foʎڟ띳a&,zF IOd?(=S9O+jnY.;' Kkq 03H9!OOLg+9!YfHwPYjSI*u}"VHV@!Dhm:96q)-"_?$߄ hCz7^*V[ՎL!&4 y6@L2| "LXdžϣokon V>J~[r؜  /8xܵ!퓺&*\V_M.{{wTTޏs&lBQLq~E3׸` Wގ.NO\Pj\R8uo)\Leڙk}S4q= %TFucRVG?29e/NTeD"cr?~퓽#D;KUuc'rȩ2\}"䠹PTv=3Ho*}"Dt$%a5?w:-e1eeeS(/x[)8L+1Hv96Slj uYgYt IKoB:-fԁ` .::C2E3ǿtip[5Y e*@~%z"^2Exfͱ#.u%ϫũd{ $fg@^0g.Xc;p+tL qU<ɼ\e4 L!:8H6~BšׯS)xER`qjX:Ub7/`qC\Ő4ÒĒ?G61i' U  ' ЊDIHHd!6gZS)k}wG%8a0]p&<z2q ` 5_O,{h:UdEQ#fĿQ %ϼ.qeG賰 P}^CGZ+B>˘#h _C$vmK'͗dq5H|!TLؐc(" _e,Us ^܌ab8~[Aٮ=RX d$ck.z4BM%+ʠ H?XZ|n-0]A,ŞF*+-H9mҴT6Xp==fäj}Ye- ([!i _lSLޠ*g׷xRcae9| {LE2qTdoDHK*7>d& Ka)!јbR4ƈN4XI \&ܾeS!SUyP),R&R- mGi{!_ŗ+K1P-Ń,F<@kEQM?xЧ q|&]1-(U]1ҹN^9 j?nǢ|ֽ; T,^<a,ܿ2I؟"|(rߥ{&$C.CUn#K3`|HAOIP`p/Le9S\XKO}`r% ;̈] PuQ #0ңlZ%P-B5OS nL1ͭ&U[ чuӣ>{9婿`Wo`CwiO:3bQ%h$B!ڋ'5V8wV^}y,x}v_Ii 5I ~z ޚc[f_DyfgD%)Y]X\PlPR99paf4%p4Ȱ$~=wwt@*+gA|+čDsC ruؑٷ% _11}Qe[Ь~% sOZS:cm ]x*t!v+ lVCJdes9g5XyhGm\-GBn!uq]L66,V b+:t6Q~K^`(96 Q_4 
ESr5bnwog5:j8E^6+ 6o!%q)f_YM?+ɸg~]l7!n ;..Uuܰy q:R|4(&hoGhP z*W$s -S ,.Y78uYkKeZszM[O$Ζ>ͫ]Lc4 $t'dR<O=8RdE }ص@ϲI(bqi8\:aP'va3{ʞ_uxg^8mcnWBQZkB/2Hy@jv-#}K.'B$$rӉOjV bZ_{mRB5Hz[( R' &jP;W9(AI: 9[ʯ7 6 S( D_6,kFl !"ʼ\s &pA`ۂ>)JrPFhi"4$8V$YewR/ڃ]e,آ`1E9ޯ4/Qk[ LϬ)G FrWGgR}e3zvL ɸK"FߠƑFs$1?YJGa9'0kF ʧ{$JbXފV`f% TA`29"m"NNQ9Z*Ygֽy m_dTL:z,7S/S]p!] \ZRB!huٵ"}\.vۼ'}7LMʊ xZc"0_mmkR{"aBSPM-Kz$eP/w˫T#~ T;0'a87yG`.!!LK %K90GaMud6}펳"ԌXV0$Ɗ\v.TNTՔYHyT4 3Q݄%LUp@mdNE}~ uIn |NDykiPv#@[CebM*='=# 'ܷ@.No8{Rx9S=x,Ac1G`zw'r3ΘG*pK&idcRGӒ#$L}IuUa;@&<^ks~YDُ_Xט>M,sCj7H&mM'BJŔA (纻awJx!ދo#B?^~މQx=*̊W-Mܙ+s;h\f$)bYLcAuϧ1^<4@ғГw\W3uN<~}ҸR!i,;T8 bYA pEI6!_IԋPMĜ!,+@Lw9u9ss MDg!QكyGF:֔4Q *B},^Ne\y<EVS `Rآal^3\vsVl14(ySNT0~ܿq{x Eh~7ԭ""5a#I/\!8!{M4~¬nC5wY8=8z|ư0/3RNYuq)#juR췴kf.ڞ#|>.^) &3N9NP&50t/} | q$ bWR/ nWb+QcF=L &y><7G$5SכY+( p=_+-ofx j—`i*X5*G7(+xI lR N%]ZrzEOxӪdQbHM 1("rcydG@k* q KkӊI"PJvQnM?͹TVgs:VN3q*Lj!IR[{z;͐,:m%?H^,clѶ zՋ=}ju ۘ׸ ̎2oi;FytcxHPD/V%9{ "2ioʚ>(F7y]0N ?QtyM-V(M;r*eϓoU4vBt;Ia=jS`EGA1 hӦ߻ZU᯷Nco ]h3 Gh$;\HeE9ܝa9vz~vXǾ F3%>U|1*꯭^e)oz˗Qz]R256'u+v:X^vܤm70PuT]r“̐378s!o7"7"#BY >D_GB.x:>Y& 'yX-92ʇ`|#cIZtf}#w.6cP@?!{ ܾuRhKMHՀ/c b?dWɬ{/vai{vԵ[Qd[AXВ gx a) z1TDa=iiqu^c"p2sy.𼦾揻˂& g(*j{oWuS9"V?&+4֋: 5"W1Ծ+OaCq6á\,װI?dQXOxUw+FK^)6i ,ߖP9בޞ;7ZŴـکହ\{J|M%MRmo.gTzۇ;S)XI3"G RȞ+yr?Er$Kx!t\5dYYJ^/׶~j@r-k1:@wC_ڜq)!?~d3k=-M/?U E҄,Y޳+xNVv}Pl:`Hp9KW*[C_-"JhӋۘ ffT'ΰh6\۟SJy@ X# LGVItTSI;7#R/*rV03Ӗ_ G)h2͑fѸޞ=(nbl/SD`럍ޑ.?ʹdZ?~gh.idv0>D,ov"Ψd d#\&fTV ̩ }Q!83$m{%wqY/ \E/a\r`kG=5a!*鵎+W[9̩:sVzvDMVG_13HZnli$&ԌYc))VM0 p$??GT `3,P.b[d*ĪNZY3ZLMu,do4|?2pn372as&BڹHA)Wr!)}`;8;KW#w 4r9Q=ߔzp"r^ըV09Do)Ojc-fz` [SaԦtwxˁĕIUJcTvq4!oQw^\M`Xos=1-LAsѬe 448mLǑEiƅ6 K_[o$V2QYe˞Ʀ8.] Bt|)/ܜV,oO]}(T'xMpG" 5;<ȡ'$CEhK2,M'nv]%Ks:bw) \R eFye;b.)QfukLk G-ziK.?̮'m~BShE7peELmA*.ae-?E^[sM\ligCF06 [:-1CB)VKg"dTXywݧYn?u(/g_^f$ %C@6hD*g4_qM[2PdT!E"$*y|5sB-gq`:p 3PSwP$ZKD3|\oH+}63zQGi+;3<ɣ*(zN|W2,,a㺍D#-[pV~ޅ*8cmo-?1rPÚJ7ju#m%o;O4!]yb\+kMљ ނYfQd|,4N=)U?#+߷th^f\h yv&Fس3Emd8:O~X;/!rj 5MKi!%_aʌ'"KUܓ?eJLdhՕjD ;oS%ݶL32nIS gZFcm`BȒ9WXhTԶl>$jQ5p+1@óNd5?B{8XG13ωw{N=k$)PDꉏ <$bj?hf$:;u9+ޯPȳkr'֋%\^)/?2hL$|qқqHd| %=\ż+/nS‰ ) >'8hi-E(] v5_N˓W\ 8/Ehg :6*W% ':dU`?x>/HY /@jv*%]Ն֩иYrXeZmi F.M) !wf0q$S\"M"4z6@.j`[l]|5U<.gap}^ۑلgtB\] 1b G?z=&p:i=B6fQ>o;NI2"oNW2| .trj:QÉEŀZGH%@4xۆBmewv΄Y 0:@8=F/r_DCGw6KLw?[l\PJopIuSBB Fn>T'~ 0hki G-u˥z&ڶ+H2Gs=n\M9Qkq1>HfC^ G]psz<,=1țʆ1پ?U@BeT`k$R:)25~M@c =%**- J"klSF7N6uF5_y'QܿfD?4`>7D3gmGVMR 'GX"ɏ>–1NQ{4}b-[еC}ujEqܚhJ] <9(p;xQ4<}w{ť"f8BI1`V%#pMb$Egn”,dԘK^~'zߪ]gp م e*, r !C%/9FS% cHkEi&SXڣ+n=4U@nʽBi;b H$@==%n_uߺb1]]|q wM#9Z/MyfXUhFٯZY[95qr? +8aEmd5l2hP vW~ݟW;FFfj%Ul*>Kt)Qv)u#E]y\b̳-}{ 5lFuvate;{<|rkCɏPA[GO[W4@o4n9اS0 Hh{k`k2@[؜;L}&ZGa\J%>Ity焂=;S/@BRDsR<;,Ur5QV LHXuQlHoj2~wO 7(9P=df;/5b[~b6e0m$SAvb͊QvR Nlfgxt0J'fGp{pZ`%)s)-_jvE}x?<૧vCb'8FA9cM‡TgXޜ,8D(Zi%d N(GaT6^!ӷ1"-ft"}~8ٔK#RYSl\~nKpe!RO@}s5bc2[YDδ1R͌~\*ܠ _户o{;l ŸA W0b &goű G&$@#UpHɝ>Y|DUV(ԤyM=#lpNm:p<Uto}×)م"9*:g @4 *80ݐȷsYh^rgiZkPFaD!(iNRj@çjHN~C Z%?˘JޛK!_p Kt}Njϛ\ؖGgXET Y֕ Z%!]Nul\E!.ruF v/[;XjP?1DWŴaWKU@>+9N~ϻ<9~ʹZSyx$. 26TYYB?mWK@ SOjT$y%Á -Cli% 2RL@~CVW(dG@vEjչ CLRJng&#HOK1PY{X[u@1?nU# _ʠˏ;^v~2;g={͎U’WGiΛo?V$e?WHAa5qzSύr\"2v狡n+]CK-k>ElW:jgQ̓ ^jIb۫Byl|#ppZѪͯr cSWdqu烎U&(13 䫫'n)*+jΪy| j NʷrS[_BexDjQB.uy۝{Y?t$ۍ۠ {'ʜ7l< O,S-坃zu5O]fZgam@YHCW\,+국1iUWՀ{7CJwfd}wpgקDU5O,$%~_ #hhw_x\~EiAs2DizaVu26gU|HN?`` 搩1B?> 3ى}΍yOl/}߾V?Uo@$5O3!e0S)aZ`+s{K43NP?eꨭp_I\ƖbF Q#{)%`@~+5! 
E|ƯA3wsu`zan, n*]6^^mAy4?RQL!G/Á1r&= 8lD]&7HfB欯{"HrTY K Wm-`P pU+!ެU|icvgu/ݠESE$JpԿE3U;]@)E%D7}TwB#q2u1;șc-sӽȅ'F\s2 2D' "{la>Gy.RȰJ< r^3UWWnNt D~Ga8j2z2uk;zG[A*uϓIβX\_dߺg妁I!ڀ"uDMU2e)ʺ} $(R5ړ9Iq(k$ldĤ7jZ2˃A+%)1W鬣d3My]Ee\F*KRY)xjrvCu{b*Ha= PYwFT9FJdL/c([DDƵDZqxqXE*z^Ԇ`ė')%NCAҬ=&iHfս}=i+MEBbH7K),&؜*kg]0̗Ș9rCFK}ѕ) Q0E'vgFT$UQIKE!Ҏep8cHS0}tscy{)Ɨ">jՆrx6#s`{& WtB ]ne2MH#NHWeڌ~淞 y54AA@NҀ)_Φ),YS;*iK8' /WN}upڴ< qٙv*bX/&yoIɞ:3=FAʈSc\kZ$5E)0O;3|[ #c jme>]v_߉3Zet90V5;Vrebw K_'t4~{~uyO'CVQFUlS km1LHx3! ÿٰlkB܁^ecGWìxQa/$i~l;I~Lx’έDþU3QkaW lepSD/i_{8\[j: Y\ޙMXh,1E2pL68s鉲]+,:`fL}EY*($U3)+5?NUQb* ǁĵ w0d00<M|kj* )bi 㝃z5Olz(%RNhhi=8'eA Hq}mOT~F!H4Ye8:m@cC y[ Ϥ Zpq:ӑ}N &mwunaXeE16/oTi?Ğ;33:*7OUUJަ#iP WaEl+Tv5B 'h`AkUz@ YLhE_wpv1ryAp˴*з ,Ied+|&`62Q;侹Zv_N+euCz2tcp *-%;&V)FUR&&<QMОHO zrg%5|89du "Ngڇy0\/* LkN3u"O6|=ШՌV͚ n|;Yۇ1RnҜ\]FëqW{{vB`*LV?XC>1'*8DS.Mb}%O~ϼr: (]t %\ #dzewUm)Ur40:7ih*:ׄ'9ps\v`\G%|6sIi8Jr;;S FbS|JZƪIwYzΦ vP@2"kۀt:8 P0Eø ~?B9)0ۄ3ARgs'9%A NUR Q()-)׍8jVex#4Ĥ!2k5Ʌ5@ky߸2i̘ףF>s5V6XjM۵`Lxr #dc<YO$+Tw7 I?lXB/[/an~zA$] [8Xl9eƋ垷ZfN㕝Tܐ3`5MUQ'VA _ArdK$vag=TM<=*`'?,cX t(سL/['eVWԂJUO(9af+,uo8]\D<_Le;=SHAzWqCjjB=e4Rݕ,f 1Np6I l")#r PĈXYvSm.AZdlivn'{ PeZľsۀ.7l4fE6mu`y[nn;9C9/vE!՛ + ̿1näՇdTkΦ'PUFεŠ0.7 \T֚^DW{QDdOׅ\@e5l!O3 aZxa}pG F3PSƶ{1ҊG3ӇM fQ9Tc0A؇Le覓>"5*Xk#[" ^gz]0hi/J g?_q8F: %Yɺ;sMq{ՙe4TU0u6M c-0уj޼!#ٓg^⤟N+1՘7W^x -X0'BOav3vs*hA8:a-.kX{ :[#> >?rN17䱥,\-==Kq+ϛf$ 5t1&r`G!C{!r}bHESUMX\#xt Pqsޠ ޡ5&7~peru \n%W_\V  3}7 jwloCckU&C2YZl|q'`';K`._ʯwqM彌1߻+~(\}DM?9" ~$TY)[' E`O|Dv(v)n/W9 }1oTe&{qSNQƤXMR"uM TC]mʴ=g wMd.s+-6PkYRl o%Znd, <M5P8z][x2ĦdN}.K+])]I`&xט+,Z` xԇtCC}k$رK4O}l(VDch)QAd!62#wa:/E5\1U֞q#}UaD:J:D|Lu!6DYw|BkZČ36! f C K,=(5LB4 &`! ^%Zm&f=mj'yYP^5nd~C+ SeTe2rkJY./ ݾ$~lĊnG*`IIN Yp naDԏ")x>5$aNJE1o @.>>d j efdKd`5.v>xZz< +ő]DD|0bT](9V .5Ք筵o?%kdn`қLdpTԥ\}4\mnI@6cCb)۷,Bb6Y\ 9)Ч BT٣g0;F?w<-2A x$r iТo3#eaXŠ|) Sly!Y-`GZ#zϜ5#/N?o]wi<Q`to 5\nrܯ< ?}Up3*05Y%-f4M)Nǂ2\P~ Ω[c&Zt_bՈ巑t$h)) `@YC_zTD|Kge6Sg0uN4tIӊ- -й~X>_# E W?;6pWp n|_.slK@J7Rg#R˕ː:y=p &CxIՉsvigȊjlR";d-oEKy 2z K5BPyN6k? ڇKf͉<(6c[<Һ')-2!1Eh4sAGxh۵e4cp`!QI^KI?*2*\{ ?RKoINȃRz& PψkU)SpmUfe\a/B[\MDJ4L@Wu bۡT(PJ2ہD'jǥBǞ6kt m 'kNI]D-<5N{HKxuܢnXmHzCʴ*}j.=Mqvy(hق͔@HZl[LWoϘJ8 1C؋:W+Gg'ە7dm9xYWŜA/ @\X/VmFGrvY&kʾùD0:\`}+:]k|E(欃d|~ّ B{9vbUvKl#T{O,–H) ٭L/|{"zFiEiweϣ"߉($FF&9yפbo[ey{"Nಯ\>09._kWR{xڄJ(~-|]<943Q`n޲x*AS򽠑}x\#>WůB=sXT2nM{)$;ž@N<0w:N')&-Nw8LYMI( fY4Hẃ n7sQTrZ/| 􊷹=##z;^֜lIJ UCS8cV{FOR+ h;<;gI7y\v:R[^)= cl.yxoқ^k)PSdmL6aʹQ1 85L|*R !7yιGL:k̜'W[uSzq1Li]Yn?l诧_ +^ԭU?W|~!2g5@C `O4=a:*/X _uIj*hݧOġnkgݣs9iЛv F.y]q_p}IQ\+5cD2؜7Uwea&?4^>alꨘ &8G ixlTZJpGVv4&c6ddܻ)D{=1Q?t!A+>` Xho6Y [Fg hh½nó+Php(Ү‚Z bi[e-+qj ,+Tr.ugt:Tѥҩ;{4V>̦.dv55T?-lM"c'AD]R@0ѻo`B-"K%ғ-h݂N)[+jOY޽6vR +P*n1n7/Dr<xIrq~`$ai{`c,IˆmH9/ H nz@ d ^@0׫|"k6y^m3{yqΞù Kn$e.xr[AYh;8HE!P8tuu&vߩ^ٚ+3qKxޚ&v@8z*{~|s L]}CK<~U7[iA?:mcz$*΢Hhz/!)Wt{BYrWTrZѱt=V^i2A_wJ䤛4]M5IUlLPz?DEd[/_iAg4ؘ744?EJ5'+Ι6_'4=sue<~c x9 F7CŅpb ƍb4 `rIx̝Ey2Z|~,x5QO;VE7m- e S/Mw\#8mK!imj![o`0;:LJTN'W$|uoH4%g]S;fa1ygV}(rz@n>m3>Ԅn#ɍpq..O qߵb%bv]3 .NӉeVD^Iu-.X/\ !_Hqcu'v" z;~ \L$Hĝ]O{]. / K1?z޲XPQ; WԺ)ؚtyLo/'P,U.Ȱ`{lq >C|q.[&,2}sxF|æRb(5,wAtmja_\Wjj27f~@vY=u8}{R` 1hbT2 |XtMsu[L$c+{~O-?fQ]n?}b?{^ɣ0ydxj;CE`dΫHk=IX$`2}Q#Hw?D zK\K(,VFmhi WUxjpGSO$ɫ@y"݈@il<=A{C,)y k R+h[R؈5. a+T1ZEʴ͉լU鋯8x:=ʌLbryLDA~܇|ɔU&>b #+.'O< vefh*H,lwxnD=հs?0ɋk,C"fe͒ozo{^ݽoojj;#\Qm`#*ܮU4|}6! 
.kzu*x&[W|f!qKVen7N>E8.񻚡.5o@nR ;$M k;лZH,.I|_#6AV경Q+0\7 :}շ̵Ⱥܥl9Li6k(ޥ˽PM&bY͋i⨸H{b=h^1mQ]fbo~L}VV`W͊D 1jíqn柍 |ŒSGtey 0&=Iܕ:;YfdHwAiJ0m ;l|Բ*x;GeB%輯YT`WJG~^k5ib^gnjSKYA<r\jugj%8vSP;Pb$3RwNԃ t{D&4RQpG)>;"Lx0ǚe6򙪄wǟí9YP#txD>kזaƮ3oK#XبTss/Dggۃí<f xP2meu">I'?4Ty]5y & }|} P;TY wnoIxo`T7B< T\ _N(]K.;3H %(Hx76]'!VQl *T&bKؽ|:]u3-a4F2G> l*~t r2Ez+p6p;z4Hm%T.]3kjӟbQ~)L׫5m_ rܿm8</~7{Zjk+S+1]?oCejȸiX0)8O;~^aav[g\"v η;-س(~wFUD\M&w$rV/$08N<},3}QQ -&|9U}oBB4w#qdv״e|DS"<퇕ríO$G(#ٱWCىv:Ȏ[49SBrqF{p~CJ5z c>W1m~kjF9;ki?wCڅTq&jK*˟Q1Hqw{`Bfw=,O/u V{ zϕ&_&=+pz$OtmS 0}* s Aڬ&=hsɯ~F ȰutB3sNKx@eOXBn)9arSS&pOG{6=Uy{J:NrԾB̧J &kyv0bE\6ܮWj4 }h9(o|OV24_b^1 [j1z)rBAomMrlD6@͔(ݜAqI %YQS*[jV[U.˕ )c;1 d]/WgYأ2tޖL`VSVOfntK6d`>]i\T5zdd8!z JM2'FywnٱA8Cj_APmsm5<:78`Ù ]l: Զ K}r"gG:4`8kd>/ WV coJ!MR#=[y\Ž݊w,Ћ +ĩMQ[Hi9ѥķ!kCmsԊW:=d E')+ }\Rp:`N,+q+Sɸ40smz1zݓE/ؗT[c8 ڳ~v5*xo*\){:oN<:Q#b58j>8Io #/ pWTܒ5~DR x+Wm-!ቍ/`xq56$b^ɕJ^cm@ w.qt60(jJ_ ֐"&bZGvc_esD]`=njt,h]C$@?"JSxڬďʺ~ylŗtă ͮ*Gt w )pgG^;FFs2dz[汓L2.z,n{^(\p/ώso ){ VNaT"N$'/cI|>4l~dߐ( 4c%ӥ CIHҴY)fHoȌǑr^{K}iV#:m->Hڻ7~Cs;d is8U5-ְ qO3%ۇCB/}'y@;ueBc vMTM(ym@! ˅ s#Ny3P #q|כV8L#jh?+y`EؼJ{kM+d?/d8dj^Sآv*m:i#^՚fF@(\\sʉՓ8h+nh~NBSF45 i t,}+'%1pw{ "|(}K`Hh5?D;@ㅋFn"7k%wɂeA -uNpg-BڊFXh{84zj3%<)Ub[{n@غF=W`|ނ`; P\#?z4!Y}xPIG#њiDg/ne^Yfz+n{IEE$'d?{n  q,PMۉ^bSs0RqXi9Sʵ{/;zETKQEQ$w덥 8f>lxPv V}6D\v! \$=k e{Q;W:"+I} ]lb]Z"F{4}vI %|4잱EIYeaddvqn ,} V)kg·TQi~不⻖E oK\(h2.@_ZC Nq *"Md3ӹ at;Eqr-(rS[J!z9E]YDNU]%P:^FsVk!ϞUw)dϽ{Qyjc#\DaQ 'LIvR؇Ot`ٌsաw]%b*T+[0Hns joSDn()^Z%i 1~']4 ^apmn`Vm1_7i})Ul,&imzA8ff?iS0!'QspW?!C}39[z Ş3uP؛t&Լkaw0w?vR!IUA|ʋ?lä1ڒ71J4iĊጛLFXQktCo^^QjQ+E{u11.;]\Ooze(lU=\iyӯO/`ΈnJlY4FM*B n],/*NR(!A|0z3Cl w wH'˚Y&kT% 䠓*!a[Ȥ/W*[~>j,ӨZb`]텪z|0arPbPS :1a NO\-Tק޴sD_wFxGo @ӂͬb[Oqrq~Vxq*1wOS ݱYBnj{v,ꤢ~݈[.)<P& ,yM/Qu~r_ӽ!vX[(HȠZ}C8;UOs"b-=m,-!9tSػ|,s2+=V 21t!t`^3e4L$f)S8EC~Ȅ?Y3H~(eÓ6~si{aK mUO<+z0XS$9@u,yK7n&[@|OՕkᕾͱu+$z.>嗞@hۗ~2rW7{بѴ+1CzGݾi(pU".[A]ƉLaZ qh'ĺ! a.YFt>Dj~]{^^~*p=dBُW\%NzJ ypL^}۞A;OVqv <.cukO)%wB0rfU,mn&>axKc$$cژY<{&{>PJ䃮[üDtHbڧEOaNH5UԵ}8]!\s[rtNv xy6E s'#PW7$ŭdcYi$X& ]/iy6 C(" ay^KM7ۺ&t=S!އ8ϭXuW=7}'QJ7HI@`5YcT``#eRĈjJ_/$ެF=?  p=@hk9|نƟ($ٮ$ʰt%>g0*-H_6t"<3gh{+1aq,(RMJ_>] ͔5" \:Kxɱ  gwcC;{@](D휻*ՓHg<Έ٭g|/% advc.Wdlon*Ky4QBf&09iș]p ~O[ Ψre6=3 -:q9?վ' C'O⅜dK68Goe ޿Ņ?e6 \UUs(] >e'䥰piG&9a@~sFc-q_se {/AJjMLyoa7&<$:~c ՗2ګw«j Kխַj'q|ãdf*m' ̌kc+O5{* \ՙyiY{lM~x%P{qbF@.q t/]*L'QYhrՓax!* 0jʔWwzSt;>BNQ٫vY̜"Qkxxڍx^wOPǵ`.D "}4 xSU,:N(r0-! !C\؍a~" 1B&W2Χ. x:k S}޷.OQM_&gt=,+VP'Ю50! 
] KE _hF舱*OnؖˎU4䝇h|Kl`~pqj햃>*'bJLOO'W@4MzH1>&I)7Yg kWyTbb鑍~i2S7df|u5@O'MnMvVoX41j L9ļmA.<'e3j'M6$Փ%B2xغ,~TB,Gẖ?O{dy3#66 \cgǃZ$$*Wܺvᵘ6Jpfլ4Pqb$?_B9'~YwS,40l*1ٔyQ"#Ό|%F#hO~yZM`uAf(cnb*bvVx4b24G'0k]:0viS#ХȣF8R]uɢΘJ,=)[Kx%ت yJ4G.hk ,%cLMKl͂Cvuq[ZS3A@*YAKdlϟ?YFM$GJG- Qd/ŏ.=wbQKj$+ٝo0EW5O8qJ8hCyYyW_A#K tO"6gE&r/@W~wJِ]s@:F#-3Y}`0ġ4OVZPIsvO DdC e^y=;Yh SpNlsAe exZ^{u#4c~ e,c/~?z{*W|!zrXj/tUZoijKrxS->q tKp{B3pUrqL+L.|Tҋu?^lUSe/񾸞V㮽G~ƒv7;\t{X%3988O'2 R>=^d/MxI2W&c+1P`#Mg֥Ol-lLk8Ih9,e%Ak@[:2܏hMۋkڍW}L?fޡX#Ȃ_\X8PQ1WjL[zڕ"j߭†9 > EH#z&#jQdA(ɼIKܵ"O @$904s-8IF/-R?08I❑` vƪ(ntYN)j; \kp7e p5{2#vH,tùz*/QsC⧨nneOhxbM:HͲU4ҷtQѣTo?Rr=J7 TUz t{OJ?s Kj37i?;e0F?V 汌ܧ'HUUV(r{ms(/+Y1%)_66U{O-2ؒW@a-#GB 7,Wڰn"p[,ޅ,юIfDΥj칅t`<% #8ZmU9 -hhՓÀh9kSL(n'3 N[D7@6G;z'RUfJ,gLΖ2<9/4c͖a9Fx5Kw7[9!Au++(?wT;2{i(fKqfbX\/P7^||4"Q@ 9/k $uw$c=i`,ʮ( GRPR(HR?)^*1f ~ +AV>m Orb I&\5hmZ}l_ܟ7ʵ bB48ۓS`*x#J(e$b 7,UF /TkTVD>%(/i HSzЩtۆS"iN81f\9yxMY["GNUf@~ AQ wk|e@9Vh=J42xR_hڤ0 dvAl:6>ݯ4ۄ,,(QMLƘϔ4a v5(a=P=`nt~9%ךG$W*CU6g T_/Ч{4]x]Ib*geU!ZI=H0ά@:]4`أ㬣h޿PL %{ "-G36 Q B6)IJ p"L\z J5SEI vOT#Db1d>G)ِ[~wp/xsRLj 8h(3i$J_5v/ Ӫ|v2:o$6Ľz t kJ"t EYlz ҕ&Z2lo KY g13$,tP7MG$N4!zI{ V$YN3>_N߼ȟ-p!CN"<*=W%$oE=!eIӼ:?kc$7=X8v:#N`։RR8eۭ;ᝳ , Mulmzߖ$Vg#3  82pH|۱9, ͱ9Whp)1pHx]CZVTe⵶D( A \|.{1RJ0hd^'}F:)֞3̭"(^%_ȹL'DAH6z*w!#UA%%Z3@:t,ùg} x[:-Ee_o.Z/cl'>INE%TqtXV! C>1PhPCCq9?N]-gyP[I\(7+pܡ#Bjh2`CYK9Ā' NtA~@\PaWx Д)ĩI8= sKה`֔AlvA7#n Gݜ e A}ԾǦ"NL&ы ;9c*:,-.aq>b`[O#bE r*#2lysUԞLRe/(!RD)8runyL)f[$+S',b,IJf6H.~M.bv'b.Ŀ`JjP +_/o1ჟ\mTf)m_8l~MMxjPkpba2^4X Iͼ9+rP^uxrRi5^B)97EUs)Vi5J,-xLp:q}AFb='NpM໹ҭ?KVe9b]6?"ЄYv́QLx0?S®wK#5KT $z֏_X?ho3;CJ迓홆S_.v*&;A=RnHT%7/> _C$$Ɣt\nT#ΑU A>Am>AEdkơ8զBPTߴj 5ı]eJLOdʡ2{t9-'8֌B7 *Qj6dmC|]0i/!BEW ڀwMU 0!acN[uYc./tV U+%삱 />̩wU f<+6of>1^XBlΡNh21]?IPw-z|ҵ}et/$a RDMtpc쿂1t'$pЅs{^/X&\EЇ,RW?#`X:?EY̺I]k7as뾄Kn8f*#CZzXy{Y(xI=K _xRz-jYOL\Qю_/`Rv 䃬0"Պ`R dR(p~e:he$rfqkD/} {PsT[ d?Rn>'[~/?t<[ƧǾ8&"ule /e]od.mu;>':d&>Ь6e닝# R{'+"_V6Ƃc`9*7'׋ !û5/LJy4o>怞({!kIoJ!kZ|'G[P{/!O) ht"HAM dxUOGύ'P<>alXKƄҌ^R. 3pBrRfiUcp N&_d5@v<1t1M7 tmpqI_JPTm$o2CKg]Z &BUY~O{󬩅ocNu %-sq&PrTx)ca2`1̅1[5Fa,rMeAJ@;2%ﱤO0 5Ba$_ELXi]On[6lƏ"ꫀv'o}p*/P[Pc*R5~h G'r H8H5RV#]TL =&ӅO BAgc{}7#Ҋ؆h1 <)fV{L^mmSI*=?lqvAA*mnǔsrv:o6HL[K71L3׬[Q#*D8.ꑩ3"E̜~kū<$ұgr "&I=}c>H$NWm1c*qr*#+9Bh>)c "c xlLjFfq<>>. 
>cߍ7s=5S1D䓟3wl!KZg5=S88u*p6O\Ef~Yor*s:*<֎wp^aڄš Pef_luz!qDz]B.΃0 _-ѽL 6I< I A8$u}3 ӠiKz"r]7bF=>+XPZYR?Ns ƢI,}L-L<͚↞CkWXA2@Xyf+Ct3lmT\OMZ>=ͤjWD!6" *[gQ32Oҙ:*;4既HO**IƂ2oۗ {rD:#nZsf4i~ MW/;蚴5}>(k@zfqK8*xx I҂.@CAP<5 I~""bҽXT"_;ݺԱL tnJˌ)vp0M|BGCilr`W|ya=ka?ZELJ$"0̃4(y}rۭU?_IN?;.3Dl֒ϵ[v4'Ժ<\@pWdc^'EV<ʨ!a#IJB!SV1]ۄ3@%>+gֺ ^>.{4r FgQ]}%3gppK5ڿ"sm}k'_{S We&`&k]ɐ,[(-mXf(~*-D`dUGylM˗o 6_́;[A`kK$D hZWfQ(|&0RMԒI|ܟg`*qA 'L~Nm,7dx֟BnF߽mà"\ UڪegREWU+u_p s9t,RSؔQ6 g_F厓K݈IޖІTŹf-q2Xi NVDž: ɅȅD=c>` FXm)Y]p<ɼ5t @UUvN`fƨb& eɹ$G6i#Z0ۏs3ؤydcȽXEue!wuIWlsvx}u[:V ˧D:ȼP N :6ޕ5hbwF9t  '(}V-N bzx;ʼN#F6-؟w-iƾp(@n{rEik}5k(EL c2RXjvmlkyd?H]Wn//)e;RP/|KAvGLȜ>غR̅;gpkF]wGwl#{g [:!'qILfTMe#:Ju`A}^JYid% )p.;jw~9Cb #XG+?r* r}L)6ҒpL%PSVgЊcH=O <7QFzK}os}"ܙwj7iyA("$jA+Ȃ`'/~c>=oȕySY 7 K2y8֠o 1Yк=țIvhB=a&FNkbH꘏"lcsH]qVvY*mAތ I]˓7N0w»7DJ4RjyiK1k  f TD@tOeE'FYw("M>]ITc=S#)TTgA:R"Xa&1ީ7/2ڕ ymMEƽ&Ԯ%zdJJvǢfgaCu=jl.ї1^nb/k; 0uc-X@V9ۘ xE)ANZvQ&-F~W(Sl3:SZ-q[^ ݍr_M@=wp'e۷=^XҜ+|䡑2w6OB/YcV#W:ȀeĝH$sݷX~g5Yk2mD64-x\+5,Gy3m z!?Ș["sk얁o"0ʦr#nO sHAQiА4?_ l_+sRz (9E1<3¿z6*R?Mp ~ 8Zn%IՐyF_̌30R½:cYqri6ˈPNzT?EdӋ\7瘰֒g-\gZ^2 B/&ᾳF}w{Ǥ_ڏศw ;>V$ǥjĬQBް91"&۩!K|%-2C jBFx'dX]yuCj)k\}t̐j`my}1Ue_!?[,}Y۳kKd [n]^ m:`@il_oHj@Kxvf)u>zX5dlu ^ 1'cYp%#|G,7dHG( %: #b/MlVNEwx pեׇ [hy"~Wȵ^WO?Lí.$IÕV+LPOe1+N6rBcCEX,]`@dYFi|u~'pI .V_ #ǵjg TD?!- ^wS e$kn'&SIdf x{ QmS גhڪ)¨g&IԮbjc,)rnd.gD ~Lxr`-%/%2)NUV|u >W2YtknvZ7?Hi0`#7e2̓M^H_/cˁ#[xQ!uM,R~) <<fv٩эdbWUtE̺tE E\g6\vUdn=÷Z2'e"` +TPa,]e [zvX!l󣹪^ *긜]˭uN` X&/7!| @`ŒݽA` 'cWcGN>[BnƷ8UrknkزQfݦeuuh|}u4)hOt)‚tyao*|+{:،x8>>j-TE")X谾iL|PZ"7g7t$4iX;q!KQWI;a1!im+)hm^gi0UDi}|w,'fTx[A`Rl|?Յ5nN:g )byՀ1@6Q]b\rJ uA4@͉-T\@`w{3jvkE*&#aQH$CHjN*VSr C;B{!ʼnh٘Z2{DR#:uMEeel>ea!hM->Ǘ!yw^K#à`Dg~:s b7N莖W}錉lׄ~~.i[y_o&+$QDzC`Vg.%ޱt2pf';z5g,>IW!T/Iʸb-_%`ֶ(hw=}(} G(P5Z71Aq. 5IT3|mtw:BapHM=|<}kG fwv"񮋈C%?Z)}Aga_Tښ`=yyGTͦ@y*5 bYoI? %v(iS^6|qDPe_ @0D_.%5hF6T} fe,6{in BlGS9J# ܓ+hK8ar^iO`+CnqWrGK5~j;;>-(7w?rX-L6RAgL\2R;V|q[) ?${;p/<؇Ş4RH׹UEA!V4_ gmZ {pՈ -zN1(C&e GG4y0{T 7dTy)a Fl3'%h)~p\*xyBr;x LGӦb:P ךR|KyUnֱnDD7RwT92z5VfKV|6اyPXu@_FqaeFͼCbɚTĖPtvkO, 9r֖x^s/'#[r ;S;EdkZȁ#7F#ա@v'16ەy^5 d0><%/|f?MUKzxqxJO%~d0W n7Z#RGLDIc%8r:P;q^f׸pG@* rŸ iC@ǂB!S p7Whʠԗ @5]Ԡ3m Lfq"y3۩ _c!ܛyRߥ6:]oD~zOx={'pFlͥ BN%ʳO񝕣1Se!= wJL5s\r NX|+.4'*)jfgKtږ6sпF &ϢQ/1+x\XKExL7Ҿ~z?] 
lHIrٗ.zթ $]uMA^k{K/:A0N{wI"~s'2yr@r 6GӌPDpJ0xH#Scc@@d- {bUzaryVN>w* EM+OgaG`K&HvZB< qYΘ+RC*P Mf`0Л6ڸaю,]0p[6op¬0{!}΢*>PWU{_-@) n'^ΪC* %DAgp%WwSYCp$Aq6_NMZz ,Qa،OyxBqN+t١,eCpX@THD㰄뒎Wp&u B5 MH= j| ,©i˪OH; [5ax085% m35tۂY,5(`ų|Wտ#10KZo I071[gXs)PXCqfv?$[Iu1ZoglO|^ 1ΞpO 5Ř3 m{Oθܾ>ì1yF{e}A>]rqd'dh0T/`P:{$ѽ?} }I>oV yMcSC_uri+QT 'Dp5YRyLqsl2>&4NeGe )A$G/KF K 8),~n*AeM[!J| Sd;ONp"=|P.˦6⦅:Jy`U1O0t[@rPR!}#F;OzY R}Vp|:E±bJoH>:So-&t eT|CN)ҧWf RLdI!sx{LwKZpNSL:;gO؋# ܋Ev+{jByʢYjwr|T̽s57䇭~+aMiKϚTW순VZZ~ch]{/ZG䑮:J|7{[#%P\fΫBq= SOܜ.h3<3ml6D"d]}5CO@/$9!⺜&  _gՆpƷ4:j DF} E潘 leX b;[e]cý bI5A ЫGRijD= 2s,6>Muq94x;[ +4j)w[CRrodD?1+A5Z'qIDOu@ XePs&:,$^lp9-*;23za uw ]"Ӛ;^{QAz嵺๜&Q.ZI0LZk/w/PG?\Z)||Q|R@ZX{WFֽ_|xQbҀxi\$veCcƃ AdF3j[ܔf ^ o$UP'kVg -( RL yt)/ {*L՛,;B8d4/ R zu՗6%J[=X1ƬNs:t,<';(^{xGzg4ȺmS~~jdg|0Ӊx!R3N[#P@ l- 1HM[ZWW@)3sLt|:"T:Nۏ;"JGۑ} h[ДK֠\= ?#Y)Z~NMQn>׳跥q{{Q(U}̯``>y60P;jS{Ι8U̜ V=C F(*Naa2_q^GoP*1hk|qĺ#pu'I{3{'s~Ι~«߿}TԤ:ZA _p1U!6&45Xp0+).8eh`Uitӳ D UX.,B(6ciS8~u^{iXL}Y0 JyJ0Sp^@';c,['g& v~_0AۚHtq]SB͝iL52 @\-kj~~USse=φX-Lq!˶iѓUmkHwLhتZ4~ViRkWNZvWm纟O6_ίCB8>9I}1J̰3D`d1^WŎ v0Փz3ӨO@8 <ӈȟyH⤌r7Jn vf~NOSf y&WtmGI[@BWy c^#496v֔xZEz雖1lIcZe -usQdF*_UuMIxiFf@^]V[gL5NQ\P?u~PtO1&63<#2#tiaur Jv#$89} 2;!&Y۲P}c%/NS)q=%tfS~3P܋-UGO?lSItfIݭࣽd4z }`-.v!63liJ C5,k'tڵhĠz.EՕޕO?zS'&.r;bEԵ}^X(O5uzpeL+ %=ۨr[߶s&Ck<8:j cdy-;]ED|@E)Sfl2" yW"98ZLtipr9<6c;jSV;*wnY9+dpV \: dyЛ]6~L4SFojqᏑzԘS8/}J b89)q @LLc䰑3)kO73>PPEkiOLB:H3?0 lw5*MbQ:H=1D3Du'DQmzݝ u>l3ucK'CBc0^WA-慠Mb#6D:;:Rdtt,2*.TnxWfFx8sRc1)Ýdc_#e]RԵAV2H:JpӜ=)_AnR.h5砭MJ儹'~sqH=mNRZ,>Wjm3F!cibγ] |M5Nr'>ncXm -(2uC+mæ}eOp=\BD_ "1kU}2y>7}#e~jSs7RtC?+f\%Qgƚ C6@I,i~Eʗiv94{C%]|J66W !3Xߛ)A]يeYLBnx"m V._v=a/gG% [l J} 6PWVYÀUu>hݙȀ//-[u2bL&@/w,YHrJ ZPb~oeϳ,P |p]u2ʽiRmRkkAsmtQYMpM7%1$!hKuhX2/5[uٷg\6B 箪/(1 d??ȯyZ=vf!JysQF37QbSo-mibwNI{[2:E:^ˣO>kwj(1ܩh ' xhں9`i>v>`{X7QrH23WXZ˻8žCuVӡcAPyp$4 +su߿̲vyپBΑڏȌpʏi[2cY*%$Pu b@̙B슈I$;14A,ԫ KyQ5=0mNwEs}iTem3 xrNVDb ӯ%i ڼn M">zOk"ۅYY:K E1WP:bGRHٲ܊֬?޵"[-{- '{ PK]ptZ{- bXZ ѓoJOqAOhmay+=ebMbhv>K_?efŇ HrBͽϤ?\ K? º%xVF#*͛ T%9RD֦/SfsճOWH_FjM 2BO*Yq/j'u!7ۘt+8|ys$ΰݟ(w|lb>ܮ(EB|)#c"IJf R;=3'=M {6-;[!!Q F)l `2ϩ.:7eSP;ثde n[WN:rĽCcAY* B]G3&SUx88.6*2uҌrpQ<[f[1 +)m.CE6ZP/ %UHXIY@b(cr݆'ۅaýi(ᷜ3  N}+5_Ğ:!:xʩ>im 6P._q$X#<[lU>5׻  &QO6b#pXn1݀l!j()'>ɫᙩ0ե}50wv,?@St8Q4sG{tSW 6ܡ#lDhT!ꙆtS.L]9[_tK%:7|}򑫯Ӓ٠ .g`Egg߰a}= =M/23CD, Wlj?'rkмdEֈ׿}S:Q*9Dէp4bKR@R"'zyl(.RRh!BjU3^߁,M9N'Úw+g>j2 $2tiGZE C\%u ؔ]gR&1$+w+$MBC)e!dl-m%qNz*5bۜ0@LjWu!PDY VbbT.?X6=Rב<;凑[ A1]}m3*x&^{uw@Adt7fpεGx4:1R;Pkb/CN "MCU7|R\:P#Ҍ[IYkk$Ñ]v'08q޻ȒH#_UJ*ZOf:Ga˙2m*ƮyI;J*k&y]~骲Mk{$ L;1- GA)agu*6Z Y6邔qۇddsCڳ8#}+;S7܇|Ҽh\73dzʧTh[ N׆;D9k :DfՎnfNg6䁂É@Fir !M"t5;\XpM[F#:g_ qłd%5J^}D#Z8N( 7WFW # Mm} ]," m-lJ{eTB}1R91+I8a)J@APŤ?#@Baffbr߲ZΚoPNu `pRy{.Dno pQ"\tfπ6O4ݰ.Mđ':#&9 ,lD:N28  DM~p/W\O4{qx@ US,X30'uQA\( x5qӵE;^'UpL]ڊD$ԧ-D۵xp/DHRaRvPLqA*w\Y[g&_%NB<=o}l$՗fF7tӨ]Ey9-GtCH5}|cgNed(z$2w(UƥoHJ`#C5*} ~}6C-p\p;~}QyFNXqb6>YHl5A$k1Zf΅7زg^vqFd6!CUeΦKӌ8'Jٔye` ~)YG zwF.OQlZp"Qq2u(Ж';BՠdFWHVΡkF}ZЏ'MUѭ5i[2v{&[jw*+J y>{.͌XKyݎ̓Չ8oLI,Mþ i3\m_Kr~QW޳' ܃}m_XѥAQڳǶ;z4}=fE}S0T. 
ٲ Ah<~(a/DGd ~1$.:HE7 Iɗ4h ۚPoϻKtjNCFVmc KYWPp }QR$qKCݕ%sҶ[zؐ3WfAo;^]xu(XiC{.=A&yz 1{h(;0n9OVd<`a-9%Y?h!ОdxMFZvwԘЗ ʓ DZGPf'Ax/7*ZͻreͦD#P:Vd>"ʺBsIq}(چ.;Xm=7Mm؃9 j1hUBPPRZ &#SkoJT)<)}-a_]%P$v`z*BU)$ps";-L6wIA&;!@ak<]T޸ x٩ޜ;,orE,0P"d9 ᦔuMCs{qR_!\%&Og_v9St0C3ujD 6{&2VDRd{\8i2G7O9{c"# t4kuBV=\i"l!Oݐz4>&O90WB0o6{]?brV[F۱v9d܍z(dJM$̬ yGm Gާ\(G>[D&:iF*^`.2d4l8uO8YDv |g1Y}sEL}/9X?דObtqfx0|ٗw=e11\093In{Ū =`ZtV4L Bf,7#>`/q }vV͹b:| %@trm11i+ĉhZ$<*MS JQ3KWtW?Y1#-a$/%wwDa(8޺ r>Mw:WO8NU3Dy7lJ"Aёiw,@@+$ ")rPV{M"߈tJe$QbK%uJx1b_&܂ E`(^PO'^գ`j0&sл@*\ɧ1=L 8Bg!Ý ڛm)=Ԝ&o  >]vl[4 MmSAB!զ] w҅kJጔK!V0@;wE#&x)w<@0=H^q"N6Ӊ]͗glg˩vbewFK"A |69uIGaOҩk=M֖$WӇյ벷B̯B42jov*:k%l ]6m_9#+|P=QxESaMOF(E\o0:mH5W>E!-ѿoep04x"e;Fe5Q7 )2Bu4W95i~J@ko|cI2 A&Bo[c8Ak's9 KsW?_`=D”.kk =f dՄ1+:ì &6ʉ'(DiK\d1<0C,MP= n.^ᶗ,# ;lg&ZW.yO7L(ҷ.żS4\P (r6ykS0dadf;pA9QqD̗ C`kB7~Ј #Bt G c\:֣4bƒ{AR\/ my q3 z˵6S)es1D6o a2X_SGv[1l1s YmxdUsfkz?I`!V=d= avtL~Orl/ؒ^E:Q[AEeMg\Y a)xOHH_ ըX__\QJeJ v8 ^|0.IK)(@&: tC:m%4Mx'z_<](&QwlYK^oN,U QEn 4WJݻ`\ FrM=-n;PжgI󃂤_G`=PE~V*S-sRlN2uT:P$f1rm_fS@E'>L 삿 ;[t.A%7XfZ6:g?B$9LJe*!,9IW-`ED Y|; CiU&lJ~tccr<;5?\6 <>G =qO-Z/`YRU%#H3Vؔ@jF̍!4U0t<79+5 PYLؾViR,)h)QRe:Q; "k1 `xJW`X]ە(V1=گ:L.,o ҂QxGjV\:2l O\픙PnaI짧 r2K%IgWpѐɻ~} 1W>JM9M e8葜\+ ?2Ka|<tìTg|f0~t6>q~"=R45ehrzGav52UƮ<<0/"\Pmu۰nh- g3qΥu8{<35~ipl|;/E[88'\O\A8{)9 ʼn KmL1n($G>ijV#;"p{A}!Ad2z֬;GD/oiO[Q%$ 5>S3/WP霋/0m 7C_\ Lud=M l0E lp;ADW{t$aT?6:{F߲&o4 Eē1dž@-h.NƳʧS7cYMZpnSɑwHҨ? 憹%-Ίˤ=:Hh:a%{jyC %BQ`op]T来3FF!y2[QQxd+~G2=O-P [wfU$T\L"c]gbpO tSˤ -z[CJ*nkBNh4qXV ߉jCq6I1&%pI?«ŴC@R4aZZxYUBY-Gq,D"j淦 _Bml$Ŋ2ae38]ρF*@Nޱ`p% kZd=V=W͢.T;>0&$ T;XePXm2E |+]l}~A]o cZSXtZP8b0ćⵜ]z;Zfq~Sü<5kX0-HvmSK T|ovzA oYt  şNI06 xg\xdIn:]ؔJdp)sxgU!OZ\LI̴ 9')d^wؖ<*&BֈsW1Ur71Pk{e v}Z-iú6BN*IC?&b k:cu޿E"ezUvj'n[+Brƪ[Oѐ#jKɷB,7|6]B%dOuVj7nPn )|AE@cd.F%ݶp'+-M\cxeS& d݊ѹ`)>D!We(t7OHIA@Uk\c/f v IA5w:'UUͷEYEQ#4JN| %= {6v;C柮sM";7YmДsvm+=uܵE.Nc f #2sdaIiJtOT\8]=H'RA5G:KAS&]Sކ1em( 2zLtv͝Lg_^]?{S;usΊid.TG>&ƴ|aC_(W[86;ehMxf߼ !{,G(74'(RGMglD~鴀hJU|WեA\=WGB⒒+Het )7Af-%j]HX+8*?$=wW] +=@v}z $f;l*D6O1q:q#.ã8y9(oVurКVCV|XL)WjO6Sy3 n#Zݚo - ')%cT,fn˄qĘE4U{tOqD Դ{/%DBl ya"&84﫜m}8j Bf"Q$Q꼮`4lJuOC#W߹u7ߖY]А| rn,w{RS,usC**A=9԰kJ 0FJ?+kL뒸8ft#8۔I\kgj|Lݴ%dz{,Dh:$-gxp6qn.NE)G1`_5.S^E  oQ4&F_ךb@ F{i[IJ]%AӼ&^=\K/|}] ޻@U[gZjtz?HD8?el9o5/TWȭfLĻɵ|EFCfTW ixk*Ni;T ϠT~$mס66 O[8ގ,P_0zLl:ksZBOm Xms |Z>6XAo2zsTRvyS~^9ƻ^'U5DC!ں> Y m 7.-ףtp̢$ IQ{IVM=;N.\${2VT'<.^8m.|Ϸ23+@\H7"L8Txl_[ye`Izæ3v:mk <̠ELd3jHvM~hs n(_ >7T {)PTBԮ>,\} X0"V.?\ ᩞ  %¥CDŽ 5&(\«HwLIWke?~aK,nHpJO& $WܳJ>ܲco[QVf֖Eޖ}Dri(moʲiwa6J(鲮R'!B\xujKy*;N(~RνO vOG>[ o\Hٴ\`ɜ#'VOzÜ/Z.JӦGC~=չ֑P ڸSe )N""M J 9֬%?kܤ5dY?oXa\Jr_ݫ0}X,=[LMpvΕ^wFj (n$aӑJnylYM恒~M\#t#QYe? )96)LN9%ٹ* s['2"dQ e@,%E nǬyNmZY2QHTtL @ipQqgϵ+G`X&)ՆG.̀&"f"UA^ IeH)VC.o!W؞ [H,Hp(b90Wv z 2)ɕ8`l,!1@ Uw, uBRTJA{f:], 7r(G NjM Yw~RgXBt=c4zsݔ$ +dGDmv֘.ɼmQ ^<]D!OKyfp{.ƕ9@Tު.BY y۹bU]Ə|~, Y~;hzbd "E/Vbdo3ZQMrRh+bS\Ð 4H)ϵ9;OSDRW$n)ա{$QoSyIDVee+1bKkv3IW_YJTg%a\7! 
* 7Cr'a.ZϠhM ?W@ M?be[7/ٮ戊Y_%%_ShB z>[ R!)WA;kANMendsq򫛴|414tX5@<4$rC2A1BH⺎[/rתlw Kvo-}gBºT3?abd,F낰&d&v4h?+9>KX DS mJX-СKط'`f~#uH?,"};un$8TV m]ȏ t"}mv9=|ӗG̘$% _~Q>7M]6'&+ u{y% ?d+vػ* p[n dR(,:`S$B38tZ}-Wз#$J cӐbkLSя"Yy2J-90u)=$T?u]٥R߁V5da>tYl̖ ߿(lÀ8\(RBl}ztb3%$F8S;ʮu ǡL9N`WV]ܢe>C84s vȴ`{:Dq@f]ՐA- MJWS@heCc$>lIg֊xF]xb^Dx G5/֙OJeX7Ц}i4_Q)&\?ҺGB#>w+]3r8݉*M}tg:5}LwC?sg<0z(ڜJ3Aݥ c`yfrFB릶?!GQ[q G*7˭]Nc5ܹ{}V<=NQ0RC=J 0-הwZ("\_k#K >Gv=W-OBd̙{N2_ch>4dtqPv/:dZZ$PEoT ֲnoSM \u o_w֋$SuC71[MwoYS1j\Vb׻f!}#s =QT@l;زK",;˨y ST-;8GMj\v:#2X &)QkT5'*!%~5/H1p^uS0Hej ׻s sLcIT=lkiT'{KK&Y"+wB58NyZ p(rqZ 6)E]'b/?Fuq"ïźgS9,4xD82?@6kXKj2icnx((5yD,Y:P[ Bzcibi>~9ؚ('%\'")WR Նri>^OYۉ}S@ӌKB»'eט%+x} ~&oJoƒ=cψ:bYB%{t":2%F@oՐ2Yd1;>ad[Zi]H0k4vv9tWe %ڧbMOۃ.{iz:9]"F{VT)c%Z6:wk|<hsYAn\V'o8.Pf\U)[p \WS7R7iJxd~4O5$F FJ{t ^zڟ{8(CgM5.60]FLb}o,1 h=tbFO0jAa& / Pr2pjC{#ZrxU&Ûa('hr_gy;E+^>dDؠ@ס1NMPthݜpwP$q}h)綦c$Y ƟRNNyL/:)/0TdhV*cê2gy$ 8ׄi+G-=l Ҝ]^Iؘy ^00-'cz(X/1! Eb 9d8yz3'7=V9DѺ:~AbNR]+ؾjL$dY vˈ#@of~?cU|'/Uekgdw`+f#+>O/M9r*wjZ8^ xqQ p?۸W[*~= 1Ky*G'(YVez#%e_Jݓd KUӚ:R5j]Y)P' f\  dx*-E w| /) B?nva`%q;>6A)E>B?hD3!KOq ->H0EwMZӟs3 =n/ĸ W4 "ς|ujR ="69E "@@q٢XZP~s}Y1b~FnUn$ yo'y/1_R?YU82 #MtmY’]9c sJ_fr ^ÓBI>3yctQ.ށg7'-oED7(gOc"F%EH2N| 좱QF0U֞mjKJX} .a g<7)s, Yr؀o %HAmdM0]Eeq-Ŵ~S;}Y$v 'iN)Qrai9^9a0箲Rf?ɑ pU+&T^Ef+=fؘpW ߣ%")* ,3|e&B Mu=PX?T7 Ry_% *W? UWs+; qkҫgw?dP/-}=_0'x 3%T}Z)5S1yUڡVGfNce+7gh{8Ї`r,Ӯ1uCӹ?UV6.jdk w8:B͞vfO3qYJ|ͪSHcPt3vIQ_(jmHjx 0cL妸M7c"jLC V+9M^ 2 7#,PhgڸWԧŒ1BGe$mGϜQ?(bv)`:lD=h_Pj)QK`0'Q$nn82z2 `DhwN3X <|YP]rCrb I_Ql{}䟙=UB[RJq_;Wߑ.8LAK(n)Оo A\W9k]Č2G `; "SyUsDݗBB,1.<ݚCeE?=ބ~yօq":vV8Ix- =}=u5yu8A]nv=t 'ŦfO[U !٤2G9,7H5rDHF*(E$zwРXP)K Id!F׆.#Ս- +::ߥAdn22N6 Kr$wmD}7ҺhL|i kJ=̹kJwؗ5u>& L#wsA\ۋx xԮZq凕sH3ci+w4֒\"Wrk]5%u0iۿ}8C,p>\~qv_Y)|ei9vhݚU!lT*am,GeW*5YƋTUϪ#9MK}~\ )TGX ?3ۅ`&U`)}e!jy4?*7D* hً vU"_2&F\V@ELŋD&vf.jS /WzģuVg\ J%D MfNDc\޴Fo]1`40օZI0![Da7ץul%p_ηPrǑKkmA2%ލ!Nhu4C{YtR64v_T0|P# L)ݪ;H[Zɪ" P6e$EMmd >v;gt-Qi3;&8jJzĒ^" 8Q=4Ky&B6~"Vײ'}/LѣB@8.)~fOr"R0Y 'QK=H]N,!xdԨ=h %B3ժ KƹOZH@v5^CDQ{L yn'ZM0 eK8ex5Q]p  Ro/Pz-:gw]7/NРfKb.-g9C$:K{-@0g '0=~$Q̳ƻewnܤCXL*' bՎJ޽C^c@pG+/YbEZ9Tw[CO#A`-2KLV9DD0@A֚piL$+ݛ# ,TЌc/ިoN3X8)*t ޝέU }:-п]{I dX쥐SOG[{&!4co H@vC$cu3>&XbˆI[x~4Z_VLhIh(Ш7U0v Hѫ2 8^P@&4iw@=LE2A [MT7L& hZU#ĉD`CEH`q||,{ɎnZ@?!(w.?}:ƔK\s/Tir)mXy|qea!8r\3IjVTWX a3&0ez+tyWT@dG fB *(dxx@ M^t߼p8lު1_'g7D\x-v?P.#]R~2z[Q\P춷k5L=DWCrb ~ӖÕ[:(J81ʺX#WWHsv5 QJGts66X0ӐT$op.eU+IF[䜹 \8M áٓlHIX贞Eڋ0UM!I.SVr4wjQԃZy=ښ[Tڳa%nwk+. 2px "%뙼amOD&V.?0HP=$놻VM߿oF`y!* h6yVu۱kb5ҽȕ7g }BcdsPxl$ APZ{]m-G 6v#3FC7C LZT^>n5}>@Xr?6\Bά y"OA?~֕=2 [#:v8hU)YWb*td7h!C  *:,UսQdE)ΜG=u~GUfj/߭ۡZ>Z%Dw QHfE\|0U'Wȁe \䆾aJJqtV =c%zBy!|f&JJ x<5Cz5-&\cAN~MYKNz`I;6 F]Q48aONg뎒ԙ@Pbjc]D "-FR$!7\XXc,gu<f1T^҅Y.+ƔSFh}"/ -c{%V*B<>wk,T@ &7\T[Omxf+{?#Ƥ9 6';E($f=\6,@$ phbN[sPMJd%:t#yKxɿOح0VX.xWp^w {{k<~`[]wM(+J[x%ܙ Zbi.B~;eGrM.͏b+@S\IìNRd%;R#3 *1?L玱 >m y FE5J-%7Z` C|G_4Rr %Aַ#-$" ĬV:=.X[˲kyA}98vBPdmK03~C{BDzM.{qCS~6Q^JZ`@?%wm~L,JtltlR  ` + nOusdN,nQr 2:,Sjo-gIILؠOРivx]LpHSfrC 3-,/ DNLOcUS++n/׏? 
l蚡PAŰח`A<@τᅡ^Ei2Lj]Q蝯~2FvCK(_W|OMq Uĵr V6I/"m2^Wq @fXk8 ;\%˕H֧f@N z,vݑsN}-x`1R3H)wC~ON|tk)F/7Ln=, ['8>"f~e#]OwaF?1 OFZ1ƨ"1+se"B?<S.RG?^nG;Q[t]K֓FM#V1vF| ']g^ipx0t>וnPYh& u42;|v8ޮbdjmٙ $gh@F"1h42ʏtUD8d-RD:ꍆfP٫-t2^O(0 ZG:p8/^W5sKVOڸ]3t3xpNjM;%>!rFη)Npi.aG֊x-&(?0 bqoq[5FUv&#jK .NG X[llc;{Ռ3\Z-w^_LxCyrzGxxT> _#.RW+ͽ9D<M(JyÀזln5R# aVQN7K(1uX *J\vo…ׁwlĀjTSH( -DB{?Y=s:FJ@-BTΒv^4515 -ƛSIl*l]+Tte9dE-k(%ӑ8ߟ [6t9BI@JC+yHk(7 +5H&+v#KL*8ÏeT_|̟y=p5UMɩ*O `Prh"F79h OS|(EaC`oN;@v{J<{*cP/(q$*L[${x=P!gߺnI֬BLenw#۴K DV O,)>+)Rs.!KdH%{zcE кzDdײYk#3m5\4)^vvfǯk5SDFPO]٥سE,jҫv'RÕYĥ lEex G#8CE~2HU5]\|+ZdySoBǾ0,hLdYUk<%.A@y9oBhqK//ckKly!N& ;q8b͹0YSS?CK `3W$!ZqPa/-~Eyn$7=MIKiw*Ԑ NLɲUװ|[|O7h]˨':d"5"דBh&ƒQ!a7ʢe=bb>AG.HHkO9(Sq:DurSJ@q ^2ZE4o<8w3#eʼx* {?ݱ8^_o&E6 H_!u> D!~=bޑ_t5#8n'y>qT8/|u5ܱ"8@cVVQBGrMñQyKR;Y mif0/jM!FË+9րjl=TX)J},|OAti>ӽZY8ϼqܿ[*4"08|B3aH]蝭Ha[_wU[ҕVaJY.`˲kJĜ\4nĶm~3Nl# )N$/G5!'ZI " o\p"A֎} 34#1^rBXlӦn?\} 4 LQf`{8i4'[)NM+625y.ثaV=[lT Cɒ mX fpEql]:&1)be -m~闁lެdhNbR@&)#(buiSPZ( vlȈA,̕P ~σ{䬿5uu̔-^J3p"QZ+?ŽI6B'qw>ƺ–jsɋ,܉ǎlh|¿7t Sb|SZ IBڢI` "[>T7_"$ C1DTxPV^%PB'Ɇ~^Ӗ!l 2%AgSSkNvT,Z fP:.\[PiZ)m|}CȑJyۣ¡u-҇64W]A?J.^R})g@wpT[_X~}ñ?Ґ`]`9<(y kU@[ }mȇ-Z{"')!K};WtaQ&?Lh"1ׅjCV0Ha*n a7nџIҴmI{: &^IPo{.}J-Թ~JOZYᖰ]ιGAVT3RDi׽Hl;΀'ñr_g t$;"t-bgZ0oax|70 ѮPD}hK7tyv;8I'*Tp?[<>?/K XΫ*ʕxB LDyh~ ۀ1G[]ReMDiTgt-$jBڷ]lΚj] 'q #7R /H׸ +X/7J:`"[<=BF{<׽dZ3_Eu=bf(r o ;O({:_ e)@1xp9{aЮ3zM2zC ;y"򋙡&d"G{/t- ]79"UK%纈j>m-oi.ח pm Giiooy7d F!0г}DB[priuA ߳Lje "GH'`D5\X{ZrN RU&!vكrԶ.+K-f}Ue;~%^;3P/")ch}`nm 5 ݿCZ#fޯv Cjѩ-h&-=+D 3s45K=eP؆8{FApƸJ2=ڭ\*2.6.vyJQϾ 9yltWBfG)IKN `Kt@ ,hN 6t^WHےhl5BD0_ḇb?%Wrz%m3wj@vV GzhT"bIbG&CZ5ձ@KY4_C'j02:.9:\`m4NA ]w njzLblcbEFt~xGW̒()_wQ{k#)q\EN},22uP:vY.2[[2\&ɽ.fX7T^ a.Dthh?l,(%7ZY.A97Cwܳ_o!$?pRʓ4\d~AV >=*<`12c$0._yشkpq9 ^dn`^w\m8@S?LDq`M8=A S פ=s:ONOo݅s|UX1W?i}t'4`xmfZ?*2aP,aTWhtBG^rzI)8*ߌHB#:+P9闖^5Aꐠ0;c1raBW>P hb @jI. [<;~v nGk~hHo%z Ur"_ :YbyI՚nG1`H ShkY'"5߼!?~cYp%V{(ϒ ,Gf?~}<.4@^8&b:PMϳAaTq Ԑ@mrbE/qKhR)t)C6P1:hӗ%.5eNHr[2ƛ^a:R\,0+b*Hy- dc$0e2K\B6r'2Z ~bSuY/$_p(@m2sBQ!]}/q۱ 2w֛i *WOQ0H{,pxJ 'WfeW|!>K ݸw_YWn XW)~&:P7^a=laޒTbJZ.z^/GYeit Rz[*/`=w smCg&&Iad,>̚K״3dWFkϬt 4ݘ6J `}5ᧀ ?]p ǡ..TO`I_z7O?d(Q̆~J' G[>$ICL⥴OlP% `VWbߩfz"/#.Fw8Ay Ω*w@P܁յ,QƽuN+yjK%+8N-~2ʠGnCL+W u̙L tA9( {L=;d> T7Y9B~z҄S[<4!HMIzZ/J|xh!!doU^osfԑCLe$2RQ,Ns^?ߧ;~j\ =ƭFtqr7wQul3gLu>(8p:X tR41&i˴dn(J-vSy8ʣ;ErxWۗ{DOv|}!I r{^yqLbHXrb9 Gx"73Nαa B 0*Z9$k0n '<\VҘbZJOO`_.X@rm_G̹dSB YfpV W ޕ`55ϡQ,c8/}ϷCZftho;>@,$ޓG#e.襈sxO-V}J^²xQEsfy_4Oߒ4؋. E)q3ϵz~wx_v.7DQmM*_6(VHd_:}g\ue宵@_,ȣٺg3f̐aw,p3`>}TT_.P;x!182x9w( 67Bn/j> vSD"}r)HO $ f8Q'sID싖*"g/'Fm8h(a(g6WՉ^ ²O#azkIakߔAg6޼FO{7jMޖ?,l觿 kwC^!KzL/$[<Y`R$0֙}[Aʐ!m9szrD13UMp݊:y?hA.ocD19ŕ\$+bÊ)kBU(\\;|зRC-DoJ[ꋎ?>&Ǭrx{R% s\y/B637i|{'%'`;fXVe-2B9ypk;ʽ@߰G|SRL?G^#uFe'`|+uکrC f& [ i8 1 D+=&kU/;u0o0o+$}UV_f軵Z un w+F@<@773eVK"|c롵v97{q")XK,/) ,0"CjE0x!NfO,ЀE+x>_a ~]Co>Yh!>Ӗ4#㣐K?y>Ѭ} /tXƒޔcjC 8FժH|-jj}W8E:&GgD-\obG\55KQ$T \p-cN Z;HSE窀q/$\m>}ypr&KCCoDP>V'yS5t3'hJ!2#$oS҆zrG˒H\Uȍ,E B5տAgt f~ D8ѳrkvbV{*B=a"4T,Pٰjl'Pm3D^dsiyI;[]{%!s9W,O"J{@fJyϥá].,G2V~8LMz;q/` rJ-F|#2B潝6tn?l?*4T~z* „=,Ǩ |3֏;9gfb{w_;`Q!i!qg]lӧ+!%TMJz_+jB1 ⢃hkyNnɜww n|X>zʈD}ƚ@1k|zXJZ|f:^0$QjMUjL\ X_H:;6 I@Ok`6s@%enb3<Œuv;A>VʳʍLB!4@ܜ0v.I)ٺa?gI2rh]9]vgy|z0 |$aQ'J%Ms?T\kGt jbJ/s 06Q4^@-1d+m)`P=$')Jɕ@9:/ia `S\,ÉUU(+ߋq[".5pͽ*n.nZq,v(6A̕nr:uEy_YREAR-Te ;|r<$wr=k@ chR7v`zqa1ɜȜNa\C-B z"zx&[{zOD="tTtSNx`׉a. XY 6u,Yܷ_%xgGrF7ԘR$[ROlƨ`5|FcɭtJth W})yNj͌/~/iuZIz^&0ekiNĨ|u!\2cb h8meTB01Vw%І<.=Q/t熂"5x؎ pjN_L$V Y2_t9 d l2GUchm6GutniyܨGۢNŭ{uZs38oa9Ay鯒'IRǡ\3>kfH6@wKLnpaAۦ/ `x>&6d# 쳗AJ@Ӱ!\Yy| տGe g^S;Y p'8|S0^i6g^}8×[Y lX,1z;<8Vk)0_a$$~} >#G.m1Z? 
Cy4QR#̤ug(:wɧ~p&Gbts]f)BdjLڮXi-&n3 @`Z5L͔wi\׀~?J\=jC3U%\,Uߛ#|5ʂ'fG:!{xJ. <@/@u~+}(NޛIR2mpzPHm"5p )R|)]+K U咅&Y3 DMvJNbdzWmc%mlJ.,:+;ӞW$=y;jeGT*mCB} \6Gjo֙]-V !]ʜᅡlfL9idt3lV (va_wf$Xf/wp. 1QC'Cɸӓj- =? IR/'$wK9Y(E["S:Ì(gbCPUm@|»H|\woP0jQaX佥\5,AUWd'kWCPî}#UJ?~ #uUC6{vp(}=|R9OnؒA*߮|\޳Z5|Fzd{sj.&;&dh`-%30Ɂ;6 #=;MFA#G !!YL*Q.WES{Ia _ݢb OK<9䇡z D^e+-SkINW0bgԟ yA+QT7%xLn[(3P2gEc$w0t?oֹp ~fS889e땿y_'ulI.M{ꏔ"6a2zu^&ъi͠=+!޹{9X6F;ʌ~g^ Eò!C߲cgԷ !vrȦ1 pMT;>V`2ֹ`~*_?vٜekW?@['v,;{YhL3㲁7);) 7Mx6gUˤ] 83c&+,]bVehFslz>Һװ0onx.)Fk3]D4sa@V { d-ߘ"yI'2AQDATk% ^r6wpMR0 '4C랍n0#*]9;6k OFBQ:j2:YR ~ܰ &1GBj_H q@vkwg2_tC6'<:%h*&cIk|UePDKhh6e}?A>/'wcA@k3-'pfٵx"66K[N.p.b*[N~3`حZr.m/B?ƯXл?Kgp496IA-(Zb5-ajgj!4j#E2kLNw٩<D?:_w=Xe%۩:< ^FĒ6ƺgT }n{(4[~ q ,?`å_Ҝ10er>Jz!,GOE":x)48su!f 7b) ۔DKssNzpAuJX0ſ2Y AUk_itDP^pg2۟bؐ`$~ÐtTws<0]qS+JPx8ye$wNRY2W!dM@3S$:\` i:-$ϫvZfh K,P>Mpd.s#%UYT\8 <$7B+&HnhħX޶|hzN0e wWB7QBFU;vS<7ZEu*FVli552+5ڪ>SMYL;tU/Dd}% / Sr; E2<;&tX*}%!: ,HOÕx@vțk5@&X/CJMe^,K ؈d/?t%crHu_ yJT,ޯ7c *+GfMr;CbH∔0?*|ȬkOy2Sep=HV1v%^-Tc>E=u#K4eĔn7xcUdLe}<RtOX.Iu 6Ujtwy9r-=}RLε!l;tZx}/@51ȧ ?y.ԭx;(E.xRnH=?q` y<3jb^vO#x&r&Ui՚@|N`e.:6 ] Q dxj:1vwApQk7˴8r!@EUvk<#և"C/)#RUulPqdV/ceo7IZ=jKI~EQTkfeۢ笽('m QrVs@ .,`K~u'U mD6m/T㱉=0ax<`w:m."X5BWF@ލkE3>0*W@7J/%vVߐ-~zw-JKiJ:z/E1PRomZvCl3,^ʜh=$(?j8V40[:^>*YP@/.nu>Sˆeǵ<9!l-P,&wd'afί|;#ODEw~ ɓ.bgɽ"1UN>n|ԋdvpw B1$;@p۰r[p"*#|# , g܊nfr%z6,QAZ,UiR?NuvR3_ \u"! $jg$5Gt1ϳwؚ[5o")O|TeQW)7**-nCH ݝA0#=jF"GڐH?k;ϫRH'Ow"&x{(Nk;21dsnJuBZx,wA,|t ~!h B HbcIHܴpU uaf0<!UPHmڣ}XU7_n]#Kc%@פ%A$ڹp:hN/W%u4Ol;B_]TI6;Oum GN 7/dLނ|bCAG :h px]NBr/%<(\#*XTDݹV#tqc\A`zPZO4F Q!~e"iJO\x]Ƿ0j'T$\Ѹ5e0y[)dQ7j+EI 7Hef!Olp|ؔustm.ŻO9bXJ%Yɘ.:P.hxXnTqɀSr:iVT'K䭠bRxP+*{e972=؍hnfp|@aV*'-Tyύ*͘r/u=74i#xDH1ӊG粺{p},$C^3X,+ߠ΍ShM~&Qj2>dHH71stvbr*!J'抄Y_$T-f ÛՠN +4Z߁^$X%,wVw440I'ѢgQxcټ>RIrbUE5+?1 wsP̼)^NMgt߯|竗ih9*z%-a JkGB`=.qB,-X>E~FEėȷ)<l+/Ӕ VMOÐ J ?dr0Ef묘M0ƩK{;&~xӑ5@9UHp)D}J c^]#5ȗl.(FC,`QvsC-W0N[_DA2+o^Z_3.b.xԒ Ħ- :E>_1b٧ahz% _6e ۾tZh]3Vb\|>;$_vWANe}6e3j4i|ĸ;px;LgȖIkg-G޺%z+ y8>I` H 2{HJ ieyn!C-%s8rxVyS٩p\Ȯfb-ŔuVķkkzUBZIM69{$7hf*14#I/ɅhO~[1ҠTdz%:)06JGԜKТV}w^dҜ|i8O | FћR@6>lY6_ꩮx6%l )wӰ֤[ #A&hrpA"VIQ߰;%EfnRܚ"r c%-JĤ\?飦yuꜵΛ0Hfm#LEs7.Q@@DJއKx`F7nxL_]\6$qe%1i}NNTl!Dct#LmR'a&'ƘCbq)p3@{CW~PZh \Mzö[R+yi8<=-#9%182{EWǵ0G)m=_N(WKRŃ1ӟb.dr҅^`UcڛUWTkJrlX ﶖ”]aƅuWk{݈{%1ߕAG\Lw2I̒QH  9ոUܜBЃ6{0G1sUrw!UUGHVT<AʷQuv/ޗySm{A3GVQZ^maL2|Fŀ=3p-h1/죤1aT%pX}"@[g{DnYFR5K6le[԰N؟%:2_Ip!F s{)^>T8FilOw !8 T cŀ j W~߅ZV`aJ; l?^eA2Yn[U[P)%~ &`{J@9 `w >g+W aAϞwY,6oO<0̸9dRr2w9Vh\ *nW2 k1H3fyZW1uLD.1b=VQ.LL*6|&q=ER> sR Ҋ*T(1>&6?c9 |ſ0:- Zɡ} TDmՍ:HM]R⦳nͤ  䐷+tvP O0Yz(]Z ։Fz -Y]y{K p$'="uס~m(<FGMi-tG \ʕ Oݦ>n'4(?2+R}|@iWg$3"`Ei3bT}v97e?j]]4&'l+˵,`b" 9ݒA. ]Ow.p% )upC-j[J#vKiM"@RS*77q-D6tVLJP IzO6 d;F⿐;v:?tw 0Fꗺ5Rhol:yO"V.&JhPa[TGL#'wI l+Iԙ/i%Mp<㛪uJu{4(-ӷ5ahitì q 7xOg;4KD/Q+)KJ.^\nz\1a}Q0(.E3F|pOHd])RiuG^M˫%b5o'`RDifwpH,LZ6Ă M n?ALΜ.uwLD A /?Qu3.~&C Qn)?S_>od6&~j-E03^X7ZDvue1q4cHUF^ 0׫Ka*ͽCzk8ry`";kdpŐ[9!Gz1 2CEUiܱlFI<_œU/M $ECC{>.(L#U-,| %'vU1u +e~,7 dudkhA/C~6tf A4gqgVV۔="]vhlm\ Tq'O{O&]0f{ܒ. 
!|; dN{`s{ yG3whU BZ™oYg?qہ%*YȫVqϟ 9gB}|6 ӖId6K#N@dvUpfN^)ʖWi`8̠7 yedLj*D#.XZ1XiCA:P1Z!b_27;NӏSsq\v"LTQR (՝cS gƾvuHkw,\X~`[6c="7ĩ?ФxMQu̓]=؃(3OօP|O:qF2{b _VBKG}eV*kK(%#Xy=n˳8[Z>% W!tN=\7ۃDZG3ZXgWxgnw\6z>T[6&4QE^?ڜIPu {;S<(`+ب3jTDmPʸx3Iܝ!Y ^wmnV}G"ْIqd9i0^qSؗ{gFN-A_0&@I5:JXn 9v2S{O%y)Pn~'.$oK o=l n g{-Tv:Dd5CcB?5蘬mm֯<5Rsf"2-jpn!.mL E^uUo0nl=U'^ >[HȮRi-txR:΁8LP8iCz K0lo``u8"Ά!Lbkt_渒ɉx/wP$t>l[68o$jO#w)(\3&i/L8uy6d|ٔįGLnSPwBSMT'X%G2 $#| `n#e`FE",2 /e& (Տ7o;V-.횄^MT6>zwTR2G g[,9H7˝Ȭw$OFR1d8=Lso^ij>taV6ZIijJЗL67e*eg_4IBEp<v[u`΢C35d$f]|BZ!y,@ U:%խ3xYnJyI-1m"&ܛ\y y v+{c&&X'j@b,l*YhZtܧ*i됝+| pϞr )nUBc_olxdrz qd=؀忿y"\LhiX>aؚ3mYta a%ݮJΨ1Tx`1W zX"1TҿK@^]a^a̺`~]z5I ,7,h 2S4$s 5 63 J^og:+ӊ@␗œ^ҧ%{wÜ^NLGZ~~4@sN\Gan86/HNzLS H| i5,K ICsEfMI(%UZ DpdZhI.X8kD84S()=g%k{snz{X2Dut?ֈnB^R[3R0uI囅e:mlef4ªaϞΕ,e1އkSs(B)͹v-Fq :&P+,3spqل9YH>5k=}&<5[,BnzY*,LK&͎ൠhMDl\ɷ\pދ$ JzQ3ٳGpB^иǩڏMrz|: ^&*<&bmF+?fw w:hrۘ"qObܺ.=;H:qNO9}?jL;hϗH5`Ǹ1]!5p}k(#15' .'hBtPn#<83'Q7R.4^'a6d۷ǧ`(S裁[x皓݁0dOFp]L1R(ٻd$>-{J%p̀H݌!jg89FN5\ϱKEeƼVRj6]+-2&p"𙅐F8[ {P%`0"rx`v]eu[>ZKZc $-;+닚I0lۯƦLےiMD. q;`S3E#xݦ ma!Fnz)8u\ۓmllL Mx(IUbZD_epNr2"w*:"tE:^1xΛ ?_>SSfyHF{ \QLdv:Sܷ&~mC?5ľq]BRC|c,ϋIߘn;AvP?LC$.#f%<&4ѹEv(ial*,$wcRq2gh >%[9&skP-`SQ\ cPtZn ph6ޘ\VcَZYˬ+&]#{G0͕BXWoK7|U\\3z^S&H#]ۥƫU=qn 3'iOu0$GC];B40Jrܴ jίXf:o QqtA6#H{7EHX@O^E2 7Zp٣^]Z5}$,C( xQ&Wk?nÎOۑv9TD_DGl2Tޱ̓ee!MWҞ¬G"mF4_umcwE \"NCɌT #.9WE9p̘ NqtT-ãN^.aM&}BV' U:gd\TY*ye[6TO{ޟB7Ŏ.L,P"+_S }[C'ȅҘsܚ ca>cã2Yݽ%9C~Xf`h g' `o ħęl,PMnQr94b7_cIh˜zBb^26wUq}V[ZЎWxսK^.kSyߐj4ę+O!B?BB h v%v| eJ. ft!S#ޞfiƎ;˻R#P2׫+.w ֗L=N%p4W}5^PW1ixx?z[^E=~ÓeVz^ \Ļ])t5B5P7#=]h-Ke֧8t4‹nO.KWy(F $W02 o]Hio. $D}6c*n **%726+tҍtMEneh$ybzwu, x'cEzv춽e&^,usVYb'B(4'B\ͦgP@;Sx(1ɾ=)[RCD@g^{ sNroIˆrx|%H fE`Vd!G<0ˡ>yk΍p]v*TX|)gp(.๓qqW JC9ظenH؛m*'t޼GLS%>QjO4[ҖF~Md~y "fG_G*WB8ήF,!8!(6 qYd"0\X;Zpq!xP&qj|#T48IE}(ЧdEǝEw?Y$ ߕԖi#@0/LF;6rK!߰wS aI:{c+vf]Рu å ].Kh~\u'{woVqF 14\?e~ߎxraKaLS"͠][Ic2"h0Nrv qEwJ]zu4dlp&{JBS0Pfz8t3{.>'{j5@n0 oR:tP~K*UvZP`|TcT3Q RU7WD5n4H5.'#o&W,Mȉr;4v@3hf-䧭qlDBf\X_&$6}:7KA v6t y7=M1 =՛HNqF>jcb1@z!6#7P{Hn80<$RJ#Nܬ-ω~:xqkV~TʀdLU\1I4MUЪH/Ljg|A.yF~BFB%f3CPC7eղ k-1{ c%:VՉ/c͸\6-t)/yH p.+ r+aȸ9OUqeɍK6[_<44JBjkx&`c'\k.bAJM/]AfLUdZD{÷!փ^4s% -@bu&cki`|H{~7p4Jt͸:~g0o`D 9`S {]5"DU#SNBg)Uf!}ɼDZ&$8+8x%r>[O"kUTQ xTnB?h3$2 -R:v ȭ>,*?'Xn8ki /WtZˆ@E>C+-Q>ړՍ`|78`j= &1^3W'U9l PU.)-Zq iP =#1,Vk%/+W gsd.݆dM JU.+ |U_6]'vpG t9so/R7wZn2:J]9`P膭t_|>._tb]$8/Zl?C'A%x#lŠX/H'E]Dlsa5有-I*%G>VrO:Bi\Kh׽R6TCc:B$%JIitǼEƛ6x2);d4qp:>=0!ovba2hLܚs J6Ac%* XyCgN巿-ӽXxuѝb7M77I~ 03-33џX9_/镱^dNBn#@YNʹ)UBȜHz7*{5>oq1tp `/ _[ń[?Oxj$_GUQ Ѵl).uTeA?bRbi\?L4r|eV%b`UY={ dRXMqz6ŃDke:3mMک &7j;4XlfB Bzz )sK%~*{l:L8hScźFn ; q`gKZ7o$M$*\j\d k[daAC]+3j -!#zYQ=}SU[Xh4|OH*E?ֲ 3p.ѣ~:eIfzA  O*$s=Ԥ@R4ȣ}WF]K/X -\,ATGHc7=qzO|i_u0*ȏn!Q";wOˍ<  ?*@KX7% +]3Td `d\nY31H^vfA@o+h6 ! gAM0Ua(4?DVXR)phު#3;p!_Oh?K8_~I^y 5 Gq}aVqkM)SqUU/0ֈv;xETȢ %l?vtc-Y< 9I-$l 4LߥV&I9fX/'Qoi\[ hs>N} K&kj=M!lG&2-Tf-D?XcӢtLɿ}P[[j:Ow(8ݖ A\|Ǜ6[Ww#1k҇?ϋM+t%f]2Qض²['SON&JϸtcjY)5D2RnkR53{)62+AP*fłBBgV/CcY[K@RE塍Q_7]fGz ;9ZvvhNuRX`%#81|os{DyqdWW=Vdںщ/:\JǏqjuZErf%[wzM#!yXîXrv @= 1)aut>{S)\s/dQ[cMnL|CRybs t'sa3"^La[B(JM5=iRґ {Th2n祿^kPBC8E41GrK<xG :=↝ 1{wD@3Lo{i7H{4/lP%{Z)Ueb/tkz7.T` [4IS52l%Y ~0 |`5w y%:uGSG;4-p^ +='ylZ % )r.YqnWGzR b @XMDv~a9xo4D"<쀟^d\RW^2{n5UZejd&Ű\72C㌧K-;x5~MT:`4붉H s;~DU6,UE ʦ7o6okx U<`%TK XTL vI$empmQ!tƺ'B,vA~|no,^HQ-F(Bgq33,DG tu SCnR0uasCa.E +;&bL_|Yph=l1LHj= td%1XDy^U|-]Spw|<6H$+cFq Yα.? 
C]SF S*G)X}ԐWlkMbh*|?o= 9xns(NM\8JBȠm?eߖ*gO  ^ EGi]=;ɹ3[!HEn &ڠ\.v:&>'%蠏l`֨8dY7"0m< 1yjگ4lygw"ڨ`hq" *Qz4߇uAm'tY,I6O( ,Z l w͝b\)ΖxNVFmrIz"Ć2sS,{g 8LsIc , l/LMM𚹘L~z']MThoݓWvӅQ _8d?JHﱦٶ{++R ٣6sBkZ1?@[^ /(Uݷ]5 U}؅O3|U٧ Q 2a]k/{xT!(^?ˬbێ|u.5f9u'y6~F 51xaC#IE?ZR2qe] apR 46 [Hۓ ryV@n2MGr0sv.5CNJ| Qs'k7x[-˓&x(@]r9zgSvq}1v YAlmtbVm}_&>NO64X^a-(+#t̪2ic@4GQElղ-vL7;3rq_s0l+msgAQdcP6v` ߻m5\vRM_o|=ooթ5"֜b»yϕ4HeIrB_#&*{r+ÆEoV2ߊnʭ.1v ;!jc}SQs4 sGw,8G2l3 f܇gt6vz]Au&bVzh=k#m-a釞n!7Dž^X>N4\*,w\IY>tJKAF@|ţ>Fpjf*do.J6Xe!4blKŮɩ_>ך&6 9d8THӯvҞ$^t\]keau7@1[e,RUZBu0t(qI*fBX0@:C\d| {}=ڜx#G6y|sb6C*%=a?0HU|"=#6ԔMBJTxH -+!sqfߧ"8cR׌A;#ka=Z>l'"Oq|eI2#~Br irDsɆY(ULK*`1h+Ǐue{ g%,I5BgH麉!ix8 Oțݍ)yr^kdҳNe"IB3$- NÎ)TW9g\/z8{`8d:8 RFwfҰE(S˾S͗z7(PSA}^`nMu(>ʎ!0cPȝB:Wz)|z;G vn&A=aE[Hޚ/k8 (lMk9oo*{#RjY*h K nڌ|+\Z-nxd]Uނ:Y6>b.8 "0~W-Hb C|TMnKDA2S|:Gdrㄿ<!kCݶQYpK%(t(?rMrA[hA0j!h%)/`ce6$Zʊ@*6 I9Օ c'61!d-FP'W4f+/oXyD\t(U%FH>Iw͐? /AmH>{)~ 2[p[ !] .L x+`6r dUkӂ+@s3Qx=n4:RɰUo 3xEdC(mcG6}ˤ=cr I/[e@`b=2c~8 =~_)RAZw9rT;P-nC nVVFaC>6mI ce&t<|>Ԋ9Lm0(at8 tj柑a}$ od QqD'3y݋6ToLN栩p}S O'݄^ĕgkl[⸏C*OQ%/-5XF(o9kZ>,E7mJ5M)oTMwxχq[%Q d$ 3Tb0h'P-o9J_(Ջǫp?%,J>K{m>B=G_|dH4"ڹwV3 rj68[b7љOgE>tUͯ`ْ)w+FĮ\ ߜ`;9J:ثY<2V0 .8sPw 7 <Ėw۱;^F>7gd>0㞤hIVW/2£ڢPWC0>Olr ݁:e 3 J19|-p'S5!-\LU^bZС:F8!;/y2,fS5~N5(HyV-sx-.emW *v$ޑRDQUے9<$H(:ܗL, p@L+y?1UZw+eR"@RƷa7o|T>N)hZ̃)rgZ;J(6 0Q7QKs@Юy&)mnV%jq5ZE/-L*@W -PyW8̾$ D+ԓz,_eG}=G\uC!,BLxa6bSDh#܍levNH_]DfPֽZÁ}0؈' fyG(ˇ%]OW )]R '璋/o9J 7ZG݅œr:AV;GHVe}qAn*ZP9ty3s>Owy {1/OoD?Ƒ*{:]!YKShrW'_6s17.]Y1&zJtP Sd!i?e YU_%D&>;5&NQG-8DC^gΓ'<$d+_L+6oNM8A|'w^ߔG~q,-Dύ[}kkV+2iӁqY_TOM^nKHRG<_ *U9ʒٖs$BwX+Bǒ7&| k#61+*E*$J܇`v\ኙl,dߠ3?a;xO_r.9zڝvh݂У:` l A{5kO['z^3p"vIdt>6V!6bM 6u,Rb8]x 䡪0,z7gitW %.=I៲%i{#ިP6m-9Ty rKǚ6y#;*ˬkl#{"^^#'ʛk;!Br]B6I*m;"qvj $d'P<0`!R/%!92G\%F[]xxy6 gk[8xW.Ne ڷ -!8ϹHR UCg/ec3~e >:73[c (Q9 C=AQlY8nB ,֜i)䄷ȓ8j--ez+wg/,ϭ;HfKNu쇉CmdW.8..4변~B"^@WZ:+j ~̙z;d)'+G#jD_Nv+ud57 [2jZ*UIܓ/6@U鼒]'[ h/RS-?"y #4ٱM<ɗV)Rvꅖl-9)7qȘ֟&ei#,Qkvh[MɫXi HunT_J&2bUe6"pR1x F4ҞAS'pO 5䖚i5%loFnԞZ6#l X|E@l 1:bt[O?eN3<і%CUm͔Ro]~T{% >,e1AtZ&ڤQMa@4w>htl6O&[ͩM"=Y%K { 65-吘d_"c[datv I|ĩ^Ozyp%_|]GFge9:b0&]4#a!8z1t:vdƽ"r,|n3S.zJ p:ȷǐ>/IкYGvڄU9Aڊ1L^pt|eg):uC3Y gWH1. M3%7gĞcM/j? D0I1ݍiLu$/J.=s8}BIn(g4g}T+!z}vh(/U %oU?ɒnag'p p.7t ",G5]w]S8$L˞6Re%[UPt|5Uz+wKD+`G\C8 Usԍ 4c=JT}(jQB<jֆ(WO9lԗ-j|A UL* }B2.i-mdڜ56{ϞK XRjԌ^kLQ΂?Gm8M}C3:|Z&0YvV+T{ȃd^s %lJ?;aw2w/`!u44h@gPWC5LϲrNdB}lMK/3g̀l]w;/%tQS:83ĬQnnSd[JhlRȄ)A>9c4 1],&bO=uK0r%̹8`d<{ +d+UxHkC"fjE}'v+YToL]/W P,:'Bo\ ~' PI{W@N#Fx42ByZ>yW|ݼ8X6ӽ.@ MH^cl<{z֗PBz;r"20žZs!%h&Ҍi**N2XPssLOYK I24d<&Noys#<r*~~WkOt7:'z{Q|qjj'W ' sxCNݝN9G\`[ I?搄NFGgW"a(\j$J@ћqa4~81^dGt==|Sxxh/Cϯ3t FniU evѫFmo+B-WaH> .V=~OCcGv>}hy@ˣjӰ߻=¸)ce1Z[o]>Bޔ X#5@Ԍ! 
2h,L[7r|Z<vӫ[%y-ha3D_{ @{8mn <47lVQ O[ijmhSDX,/9*; }k'ïrܓN-j  #yHz=q,T Q'㟏 hUUl]a_`pm(1l&BQ$jf;DBTXdQ-e+Wt𱯭*Ms/+ݩnoإ~/i ~3RD3ajN?~7S-Յ!lS$p1u`)~t{qI'LYZ &AG +>Mrp%N!b}BhL˹8͒/Zad_E4UTyFueOyHTү3GE:Ty$Otb,HsV}%"}jٲ|tGf955Q*}ݗy?k8lcLHeۉ# @+>wuD˞,ߖ ~a fEnT]NTYMwmDx}Rۋz{ z0ĘlTļTj6H& *`BL+?Y` cʳ"ڈH"ojSy?/'?)+:]:oN?QFNW NFԓg<8ɩ~ eY~.Jg9"+zW?qҤ5zK3a9Rp-rBg\syF iCبA&~B!f*D.o5.l:Yxh }f` :#8:Ējp]d1Cx6^ӏZ#&] ͔}(+TD?Lx焝5>[`fM6{8%ۡ [o5#dwp,53kM̎n(c>~$ w=J A]~}4\T5w6Ƙ]D eaq"]pτ 6+4~acu$]fO'~&C/)(vPͤV>MXźWQߔ&@[ޤ^XRzo=quǧ3KgiY)׋H < j^ڛg2r WVSEQO]g""EW" r0^C1XA{muqb.[Bkb"H[_:)a{v*9_rAGfDN9/IroAy/c쌭?$׵y; V 'cN3aG" \Hַr%al^dWqT_ MɇِI#*q9Z'!`3D}eLVEKA~$Qc<'"mgX@x,:`ٮ}cȃjʋO*)QDfy@k.&ug8%B2==ަLmG?#co F16(9ݢQ;B$Ru婡Ro[ӖF,aG_+ċN8}Q R@l ,0gzli um~s;3rO Ov Q gHQ o?v%4,Jg-8T=t :V -{'< S4#{&RLO1oAOtFڀ>tNoz2db!UUEjU PnWn9O:$ P"S\)l_Pƽ"})}5܍5effn@ YdvfG:4>3,vPNM]"ku{77NyqnBUDY0ˊ8s˸7d\6&QdOlVh }?"JLmi38FEUs sʮ(g6~l4W 'O~&1 }*;DN3+J^ۄ+$r4ްiDP^$ڳxN'|X%J_6 NqX 8>.,ţq-_̓*#ǩuqH+Y42@m{>SB;lͲMJk`1f>fg 6$fنo0c*N03do&&g%b[h0]P$v<#|Kc]zXg\ܜZ0!nҨ=Pz;.ʏ2?B)iHwưyB5q!:;! *NYu(o_+@þ㨏gOu(0e))E B:N0bHSڮgE2}j #8p*D)\k0}>eOUH3_J~Ur3 :åU~ y@y(ffZlyZm13(052R=6uюIjz']B˶Ϗ)ySGV^tu}h0{=O~"_o02DHS= 1l[|9Fmdս0; ܾ(@7Z%xnTA5ͱEL}1zE&.xx7})ߥW3hN P뽭}-"/讘SxY}/%0H+a~L/1ÕsuSbXTÅ dVeOC 44*S[g05$;}ҭ<dݖю1(_<ˍ" XPqtߙ=n$x޴@,5It"m+9GƱb&jS\5%b!|$L~ɫb1(&4 ?1]+=+~yoi ~ E! kڷn iij47½kx^2:ULh<fBm)W"m$!60)~B0F9>WWmw[ pޛJ"yD=9Qv |bYoxϱ4S=<o~.yǯ$Gk<]>\Bp B "gkv*CNGJXD8Bw q  sG:LZiHY\S"y~YJfWz_ ;_Yw I'\\*L}9Dݥ?j6*a,esχzƉ| P 5.Pur\_ٝ{2[.+[D yu u*^9 I7x7h2H oDeP.| l=!OԞ;_|>+~LY֙kI=9~]N5) =uqAj?qgCŬ fp]iԏl4HR$EM [óY3^~@ڲ_I+܌rXZs|@C+q.aJCjIq7RHf^YlšB#/o"mx00?z< ?3bOjz#I%Q-n3\"F-ž.!zupto %oF{jH Z̃ˉCZm(/,!|h/D]ߓhTCKBes[_i]<4)xpbc: kZO;i<(g޵=u1r~sHMI۷s6g<Ma͝hٸ:$7zSI۔r"R{#Wc˖~\*J~wkA0T`'@5k$Jz`lsNYZ9 `Lzf wKm>>5<@7BXVL9mS8Q#Ie_vyL̺.mŸ7LOX}^R ,M-rkg+/s^ŘޜjbAf,Dž-/:\t:&}aD(z P>`<Ԥ3FvMp'iZ `FJ.' P:^qނTS`Xr>Jc DLg~)Ĉ8b_1.0#XUzSXO1H^c+)4~ڔ`6`$ OvU8NTfD2._Vj1ĶSӄ1ʲEO$mŸls;nެEM S;H)qť #7EOF%^ WRBJ zFcj]ޙ/*68 U#G~ѯQLu:CE,X=J埛Kh޴{@c n*VwT4FF+B^SL6l|DD<'Z$ ϯ7l]>߬ѸV*Z"6K (s.|)Ն2x*xnh W PL [ZФ_Q -6gY89t8' Wg\F*,bC@1)=$lќ6ב)QBU ι']!Kԅx׶&{:'zqdܔtK|-3+UgmsnY˽xd+&} u|8qr{m,muT1:^J3}@K_VW~E=yHXRz %ڵE &#<6` OVdisFWNpv[:3k"Zw(8مF;fiDT2LD v}XMQpGhuVK MJ6N>tAokhotD 13Gh/7Ƣ'݄ &Ek5iFeѿy^|I:,~A WPw:y{e@r^oG3׊lDQENZ"@{Z?HאVJ? uDz! tA-MmҤ~ ^$lh*;A0GKzh ,ċf2WIcUL([R=VtmюE9:K_Mnvk'>><8lk;y6.V#5g)# FDQ LݺŪmgYȸ츲w]al:a%ؿ@tv a<Z_ |%M؋K|8 FUӹ+#Аc KG8#MP ^"ÜMP jQò[7%a]6뢢3 Z0njOC̥gsVE0} /)ɿR-0֞bVss ޫAmnavmppѴk}Xƪq6G%!֥l',ZecS4KOJ nsֶ(Zh7tᕹaT6:>Z-{%ni~k 볍 ,|-8->[!-9þEqhƨլMz\$A^* [f>m+ID(wqtΑ83YtDT>9X$frq|Fic5V8NO纔F}_l!X6SEpdlFwzANpXܧ77m%NRoX̨ڍKV|_M+%iT`^ͣxY 1._PKY{Y4 helgMA7\&,B=1?\ *'p)Gay)]SLsb>DuGMXϫU'd Mt{nlԯ>SwE(E8le Qh.#վ6)XfDtD 3Tt_ۄ8R/, v/HuL38 VD0;-zթSa>)A-xA[m, & VsheA;́4U]͉t]Qc ̑qf 4'ɡ<-X@3vN,'n&v;xThWj ݠ tZ|cu&.2?ɛď' *M CTU< u N$#,%5 I^1оv[HT',M)r#ٻ N<yHXy/ #f.=Uy"Xn . <=yݔ:y[y&w9-Y Pk|uvlp1H j{FҠ(J v5GjP8@5ȅ"4"jVQjʼnR0'ozhCV8%jh>g3"\ "z3%)75ɫ\)i $gȟʼn⽲y=kN˦)G nz2loDS3t')N@Iru]W{=,W(8hlUCq/e2F=UڃDNͥ툙H* > 1*,~,X5Iė;G)APxm|Ca'C@"sMZi ꐺD%ZA^WD1Ӓ\ %#ð/EB`ȩB&Q^ |qO!XzH r-ڢ\ߩ(2Cvԉ)Q"wGbIۚ{&h,g*#4&8 g9=_1;2ftw90+kp)-RH6FHfz`d ҩ3AA;{9Zx%b"4Ing Tf24VS(~Uz{8pF(^/ "ӱ-&jgE.QД-i:@i2tƦd] ]yG]22lЕ]ǩ ߓh׼>\p#^!9[O̳gN"̑B]sZ EpCdFў!g():7\[W3GCV{c'wf&. 
eɖ 5(tͽ"*ރb3[ȲI ` s E:Jt5Ot8Y#D;@r%等.1i ^zVNt㡇  _.Bɤnk ۙbFYj.SQ0 5~RBֹ58e1$P Er )D89ȸ+ l,6W\a#aIﻁYlw^'z~P^ІpYahfO<|Gj~~ dJ{(I?Glt;( otIFBբ ̾Xm3`:u1gVHU`6^CXVٱjk%z'fӡ-?ͮ^-&DǪ)DLj>Ӡ XQY# rG)c1<[xN)zMYǴe,UY0dQNjHpfh#R*0AO)_cVNaѽ7dQӒ-8M=E#a{ '~lCGyBEl1W} t䬶 oKP4]3u/F`Tnim[yF#R{xKk8vx2QBhnweF)Eƒ4LJ>VP#lm[KlD=G?90[ޘ0D|9QHDWL˧v?z#E^~ۉY\:NsETKmaPd# Q2xo`loUĠ!˧+dcC183MOHIؿh !?jr#bC*eF5e˚Dŧ?"kb L+8&=JM&l6Da͚lJ">Q,.qFh&;Ɲ2z}FF`揼N|!mS}Ȗ_ݞ: TjƑe0 1W,<}6z+?gpHZdY?]  'ZA_cʤJw ib &lZ })AП*-jb:ag8 /E8?U$G-I[{0R\ZX,SLS> ڐ(I|FE^Øe =e;w<5hDr .|Vi{:¡}לp5OB `x&g0}a_> ,x' H'Ei.4}]VR"Xg^ ܶ]c#knOCz\<f(ڡ}{Úl bzKK[[%5G%\ RoT a%+?3qIYxJ:/ _Вү/a5w#3\7ȍs)1̙X!wa"R[{1Ilɴil4*+yE<_5Oۮ%3awF~^ext8L-;bT"ϕ.EB_7sHҍc%зa?`ObnKi j@vew w+hub9p'Ã?ݼ9?~k 0hHxfyzS_JVѨ];g@P/"Hr=W`CMh>M@{Z͇>wr±8y;jN,Kr@L#u,OA npE>fVV&^jcgfmjX o}]GŐ)[e"(!"xhe.w6};8U}SSV:v/6Z./].qNSѷJn3|b^}1;Zpc=m%TyIW z1~F||;:2 k~(iً$,X@d3ツզ-+c V"`#$ ;y.3;GlJEg5Qe7sP td ?`dBމmkcrݰ+1c|tAq,9d:>&"ݩ~5Pz6LB會qT9ӫ]6 rH1o,Ee}-E٪9]A%=Fk}8gS .:Ǜز@l<<@$]U<IEvMj!:a6cؗY_׺,evatVk$ ]-_ i"}CT,+T}s4|!!-q"Pe-$Y xSd7]ԢQtG,M6cK~!wH<a`põY'1넇)῀S3헅a.n#x'0Y5oU{my+bG*>a ?r2"&I9DKS`EAoy 9†3A F:ޢkӶr(J]N\ANLHܩ톞i2 z?_I!񥇸Пݚu/ /9kg0`ysD(ι 7)<]:czE_ӑv9E_6tӚX#BŶœ!{EU߉ PM [^Wjl2ݔS?x~cW+t~#8q7ɇWM Wis)%l->$'~v!]?XKMYxW,We$!CHԪJ6ʲsj'Ɛ$QjI5stDDPftj>6P7 D31pQ]T̂!:"E=*/F.{5`uF!O'Зo:.b>.s;ynq&T[597m,Bl?ֆX2Y/c2F.bu&LҽgwX%La%B")=(3{JQɇlK \ͩyȊN_݇/ui {UYl- #DhUh&RN>7}w{#Y QDZpM Phu d ϼug&b~b+`7,+d)k nw6]+!^2:5<* ԥ~gא&԰Q.\VvoC%ϩO!t'U,Rw RGV;ۀĵ!Jqm9|JX.nVe`}dܴ9SsXU~kZ@3HXB]y|?ҪpgdZQX-%%cv=Ss^wSs>!? TtbH68<9KӎRxOK:i!ef3#Z 1uO 2ׂqbj"DPw|_3P'(Dm},L37hE3s`ޢpoѝ"~}4A}1o ]3xN2c7)U%JIkKO'сfh1±iHh$RCFgpW+sX7r` ?LRKb(M0B%aU`',jI'3֔ך䷫oA>O G1 4NRz a"f紖 Y+SeW'5έdl 5Er3DvʀF>U{2"'yopIϠY[RTw7!b8Dƍl˲͊tZ8#bV/ZAiڢ@M&7aM"J(\rH4{=- thVXu. s S *Kd5 !e-.?:m\0#Rt鳌 ɕQ"#" i ##Da _r^ HC~hq/N'rK"gnﭵh:)IM : Lxh$VX-0><~B 溦5Gvk\b:} \9OøkrmGܺJqnǷi,mHAZGkMj*'eo~_Gt*Ѝ`@v3Za2D;쎳؛A^dR7 ~1! 䈞Q?QUH< Z;q]޴L2\ 5[(_pnCJ,J{uJrWsD4 P>_11 (_ 5s.*8\S|ᐳʮ- ,ŝAǁqe>AHpDڳy<:%Wq~/'\P|t8:AͿk2yV%ՙK nċ&"~ cLOM&82QRN}ti~t $}?}/m<$zKo*Qlۡr=OrNa'flJʼIن([ ^LeVX&W>̴JRFqG>ؼU7x+B*d'ժbRq65GԢpIs&ЛPsAC=lB " 2c2X(q77]YpߗDD߅.s') !=d.b#_2ٿ}ÌJ mEr8T-vk'!W-_;3V^Foۣ:#xqr[d23N f.stn4ghV/sMcr(J8P'ٙz7Y'(nJhIy)>CwCYY֐uzl^GKLI.iKS N[w+1D= [+JX#eXmk3`kX%6 ,@Qׅ) Xa.i<1Al%+Kh7,1>n%rF},1C+>8`ALx%CXI%> NDZVZbJr%6>\4#6oEpp,=&6vuv![:W ]D;X~Xґ6#IaA鴯gߎZ`)F~Aدq~PZ< P/&*.}rt[DR\_&Բvi@3HFr aC 0G=WrI"`u[yB^?`WחQc{ |l0ȏأu;H6 䮜^ DcFM䈝Ԩ 56eΝmmz4ەRp 3c;{e&ˮ\D5Hf[o,:J<@?Ŀm'sjs%+PpWX?Sa׾\QȦ0 /~E/r!~n F $[ l#"(6٢:gqY}V+8 :fW׀lhO'ԁl]c?24qg2 x*5?يT 7Ǵo4x*`%ô^% =*^ ܦHRl-*'u2׈ĩƋجbgLkWkC+";1k ~1 *9'..:$ @! oVӼ>Lh=m~enKw4zn9aܟp\5 >d]t؅|#r4w݅7_=䶲@rHN&Ď n)Kaz )LGvv! I| 5 QL ֻ`EXt&- gNK S-y};AHeׁax=>a`7@Mj鷈A$D1CS.Tk`}z+R$ĹSYi.,&g6{E<(14GjäwěRBBjcUS-؈\gka޺;TtdLpiBwZ@n`; C rIY9\Kg6c5HVspld{ǿ0@W<:h\&:YB]ŕ+FT[F0R-@(%яb&Yatä_T5V5.l8 gqΒx?V ȹ 7>WK{VXc'+}(vR?*<}_,"_6$Cl Ngx8LSE氬\*"lXQuQy$Gs;񈨷/Cp\;mgU5ד^NΆn :1csEI`ik3!-9 }}0ylI4bƢ"1 vt ( OS@k %*_JP;ʹHw, Q1x:865m*Qxe %2y[zf(3qD\H<4Φ t$S:`2x 1j&)LaӰ4@E\If܎Kh]Sdt[ڴ769Yzj6,LP+f%KZu{0^?ȷfd̸BagF7B5th۲0r4Lk&R{Yճ˅ N8zRep)q xߎ.,택n'0 *=٭l}zW*biUcUF\&7vz34ZTL?Mx L+UfQ_DQM8J}BAҰBd,A?p澎'$n䢝 gjjjb?:HP,_  !^4c*};q>xșl(;Wtxו.V͖rV&LxksuNNJb^)X*%z@=z9 ([F"S@u 3ܰA%mL[ s)@s.WfUO${FmZ8\)G/ xo\s6(h0z|t]}#i YqL*>0$[gǖ?tC^8<%zW{aN#qEEF'USk0r\CIs IUӜ^TfÇ9xXF>6aȡmE@5<#%"я➌8yBH@yZΒJ6$ Tm͹m[|Rvlrȉ:9Bmu"s]d2.;S @`p)Jѷyr;PKv)zuͥH+"yzBzEA:XCv4;+&pɼtMՇ#0OT%@1fM&L#_24O]AqUe]gGUҿ4өV&ӷ&?P Z NY*= _=;޺NܑbHbh||-C%C?^Q׺D^#EtL11qŔ ~lNYrC\bbcm-zp"}uh%Z^9zaLkLl/٪1E2'nl W[x ^FtLV]U' _4 Zg{L)KN)R䁲k=^e5 L3S.E! 
Mr]}<%)leXryDP*V \Pũ 7 ~ck .Ap; ~n5vP8HɶrvDNAcya'd^!kA WL݋ۦ|te-lUnk{(39q8|.6\˅Lt>Zjs75="6G6v)_oW~<}<{&#'@]x0KLlQ[Y{zl\uxt+%XT4>HZF@5ዾP}`#pE8p@'N'wmu%p.$ {.^o9?\T(鍀fakGԤoP#Կ(:tw]=-k}!bd}+s#b %838B>G?""LUGLT0OG溛ZQ o9V`ta\Dޒ~u +J׿Z Q28NT~2H@j<9-ZKf;V$55Mly%|Oevh~w7ݘRûk|C&0nCKX;} C`;z8qwuچIvPp2%htvo""_IqTb DE݉i 6;3\Ӧ=DSٰz5`P:l/67"OJwg_ |G_cV=ؼp&ةu k޿b2;@5B ,j%j-,g^J]hTC{}5+ӃYCr@n3d5F\7 H d+J|6?H֗],(MUn*CugTӔYk5i-UHSҞ<6AA ׶ CPG^'oϻ{HHno(\@I54%I8 {qm`'h0]:eZzI`rT|5!509Zhj|#S"XA6$"XOݜ *IȽb4 :@.b- 6&mB(o.vf}?~PQ-r PQw>:9$((B6a9vHTx"᫓KE^gP5NS+yU=uB.t0OOĉ}*rǒcӜS&Axw94l3`S3dTw $2Haɉ+˂_Mk4}ܫ?"0Z=X{gdO`DHKS_ٜVURfMvB‹Ɔ%hȁ5;4"BlrճdoP=I$9+߭ L5ftZ,LW/tBcB6];@ёu OjAF{d5 ZpZ*h^$L}N ;802vvYD_8uLU7@׋,]7ڇ#Y-`üYb),lC&b= []kxfK{V gK@%kg*OvT60kYC;,086Pͼ±/߯w^7~ZMS3owaXg,]V|bH>?-i|OXggVa!:}ods nul~H MU%{,*1|x~Gs)I wwfW1OE,42Uji~<'lHs8V>5MDE.cfz`p#/2@<+2U!QF 2؅T{~7Pe29٥_iok=<ɫ5!G15Cw~%t5Ād?4S!@0tҥPw&Y"EN o1k%;7Y[B~~鹹^SG ÕGLpXsbq86+̒ %e#3[8+EFK4>A8Rl3p^g˽_j QEG Oۛy?֠Ԙjdy0qgaZO;YуV_ I> Hg2+(1J(30#:ϭOUh^d9LF࢒Y7r!j z~P9WC*3#徆 غa%"c Ћ%$#L)ʓgJA;h;]٨,:Z pPgU#^6Z呹n$o*f0q%:tm/gO1{0:.Jq 48 t{Hzesϥ6+Fҕ}F"Ahb&@%].̰ /ޫnQtUTfhW{.SB)nE8S2@?bt<%ͻi *|\CV  # =N)J dhC U1RCuOAm\ᥴ̦Tqkt 7|A7K\\Umi( @MȬ^d̄a{CzT':کe叆|T`Yė~iy6W'ԍ@R=<4 Oοd*L;I,]E׫JXqTXO}>Ad%"8ywD[LX@cCH6`N];2I-cƘaml"&Լ\ V^ic1ЧDJ|4-Zjx>.ueR\QAF&(=#C9QŞ:z@wQjqmak 9ϧ (yI#>S'T[ iˋmO;.w?/[8'b G*<6V&}KcG V,Tq2U! >5I%oiv|ж_$8 DB JX=7usmN+DځΦޜ$XoTD4"ѯ|/_% ~ !BOgU]=8F=*E&[ ʞifGbcPR<6S %)6481P2Y5\`M7aN^2}l3K}DbZ%W#w\[$;\B,x=Lb A|ۧzV847& Hwfdkxwu'c^6 :?:5<4^t_!EU񪿺t,zmĠS`MtTy/`~A=!ߎXY nmGJNhAױΜN7&~m. 8JD >3#'SL]%q'R/ ՗4x,ܱ.szSXL.@Nǫqa5^Rr)Lr s{4D*4P/=*pq_U$;nεjp(zF&m^\# [A lR(K;10+JKh ɺt}~IX" ,JGlhJ%l,yn^͡d76s#@xl+e^ T3Q.}FAh0 ^^фX(2Zo͂G saJ9Ɔ 쨭n i 0M!5z `}%a.ڕvŊ+f(ɿL̷yK \Lu rY?SҧbH-d3r 5*:ͤr)YHNRCw!mUڂfjEbp[h4=avv޵R*}Y_}NLazϚ9=|]4xB_- =8FQw 簀ae3'ѿx3H2i^ƀ"B "i g)` Tj.Abz//- MBK|`Aتb4r%K' )U+! I*>2S%QϒHRg^[+\ȳ.8Ă4=ww1h,:,c #ԝg(a'n2[1*SR\}-ŭh-;'4rUd25UNwglDt& k,nݽxSSD)2 =[N8cM'tnٓeݚ淽x͵5vu0 6.&lK7pUz^nX:ܓvι2H-Wypș?ƀv$ʅ(:ԄɛM} /UNeYmD:uQT5z F ɺ;l5t2 vdKpn{p3? 
onedrive-2.3.13/tests/makefiles.sh000066400000000000000000000034511360252424000170520ustar00rootroot00000000000000#!/bin/bash
# Build a local test tree under ~/OneDriveALT: test_files/ holds content with
# valid names, bad_files/ holds file names that the client is expected to skip.

ONEDRIVEALT=~/OneDriveALT

if [ ! -d ${ONEDRIVEALT} ]; then
	mkdir -p ${ONEDRIVEALT}
else
	rm -rf ${ONEDRIVEALT}/*
fi

BADFILES=${ONEDRIVEALT}/bad_files
TESTFILES=${ONEDRIVEALT}/test_files
mkdir -p ${BADFILES}
mkdir -p ${TESTFILES}

dd if=/dev/urandom of=${TESTFILES}/large_file1.txt count=15 bs=1572864
dd if=/dev/urandom of=${TESTFILES}/large_file2.txt count=20 bs=1572864

# Create bad files that should be skipped
touch "${BADFILES}/ leading_white_space"
touch "${BADFILES}/trailing_white_space "
touch "${BADFILES}/trailing_dot."
touch "${BADFILES}/includes < in the filename"
touch "${BADFILES}/includes > in the filename"
touch "${BADFILES}/includes : in the filename"
touch "${BADFILES}/includes \" in the filename"
touch "${BADFILES}/includes | in the filename"
touch "${BADFILES}/includes ? in the filename"
touch "${BADFILES}/includes * in the filename"
touch "${BADFILES}/includes \\ in the filename"
touch "${BADFILES}/includes \\\\ in the filename"
touch "${BADFILES}/CON"
touch "${BADFILES}/CON.text"
touch "${BADFILES}/PRN"
touch "${BADFILES}/AUX"
touch "${BADFILES}/NUL"
touch "${BADFILES}/COM0"
touch "${BADFILES}/COM1"
touch "${BADFILES}/COM2"
touch "${BADFILES}/COM3"
touch "${BADFILES}/COM4"
touch "${BADFILES}/COM5"
touch "${BADFILES}/COM6"
touch "${BADFILES}/COM7"
touch "${BADFILES}/COM8"
touch "${BADFILES}/COM9"
touch "${BADFILES}/LPT0"
touch "${BADFILES}/LPT1"
touch "${BADFILES}/LPT2"
touch "${BADFILES}/LPT3"
touch "${BADFILES}/LPT4"
touch "${BADFILES}/LPT5"
touch "${BADFILES}/LPT6"
touch "${BADFILES}/LPT7"
touch "${BADFILES}/LPT8"
touch "${BADFILES}/LPT9"
# Test files taken from reported cases
# File contains invalid whitespace characters
tar xf ./bad-file-name.tar.xz -C ${BADFILES}/
# HelloCOM2.rar should be allowed
dd if=/dev/urandom of=${TESTFILES}/HelloCOM2.rar count=5 bs=1572864
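
# Usage sketch (kept as comments so it cannot affect the script itself): a
# hypothetical invocation, assuming it is run from the tests/ directory so that
# ./bad-file-name.tar.xz above resolves; the paths listed are the ones created
# by the variables above.
#
#   cd tests
#   ./makefiles.sh
#   ls -la ~/OneDriveALT/bad_files ~/OneDriveALT/test_files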